[ 651.147758] env[62627]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=62627) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 651.148109] env[62627]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=62627) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 651.148158] env[62627]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=62627) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 651.148540] env[62627]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 651.242475] env[62627]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=62627) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:349}}
[ 651.253296] env[62627]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.011s {{(pid=62627) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:372}}
[ 651.854648] env[62627]: INFO nova.virt.driver [None req-a9d8c399-549e-46f4-9c1b-70df1c252fdc None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 651.929688] env[62627]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 651.929925] env[62627]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 651.930012] env[62627]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=62627) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 655.130818] env[62627]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-c4f26a7b-0f59-40e0-a624-6ca17b2bb335 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 655.146996] env[62627]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=62627) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 655.147190] env[62627]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-f00aa36f-6bec-4695-a55e-627a38aba13f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 655.172574] env[62627]: INFO oslo_vmware.api [-] Successfully established new session; session ID is 1e6ee.
[ 655.172715] env[62627]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.243s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 655.173302] env[62627]: INFO nova.virt.vmwareapi.driver [None req-a9d8c399-549e-46f4-9c1b-70df1c252fdc None None] VMware vCenter version: 7.0.3
[ 655.177098] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17bd736d-dd3b-4a66-8a4d-f789f52f15f2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 655.197796] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61ddafac-8270-406a-99f1-9859c4f6fcfb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 655.203582] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3e2391d-c00c-4dd6-86db-7f50f6071820 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 655.210045] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64dba671-1c7a-4679-8005-b3113b0fe999 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 655.223020] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e556a45b-8537-4e5e-bf03-5c1d488a8e87 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 655.228587] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c90e3007-141d-46d1-8379-b25d81191001 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 655.258058] env[62627]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-79feef72-b0dc-4e38-b680-0b6176b54898 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 655.262966] env[62627]: DEBUG nova.virt.vmwareapi.driver [None req-a9d8c399-549e-46f4-9c1b-70df1c252fdc None None] Extension org.openstack.compute already exists. {{(pid=62627) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:228}}
[ 655.265627] env[62627]: INFO nova.compute.provider_config [None req-a9d8c399-549e-46f4-9c1b-70df1c252fdc None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 655.768580] env[62627]: DEBUG nova.context [None req-a9d8c399-549e-46f4-9c1b-70df1c252fdc None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),e75156dc-4a26-4e48-9909-83fc837fc359(cell1) {{(pid=62627) load_cells /opt/stack/nova/nova/context.py:464}}
[ 655.770697] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 655.770936] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 655.771859] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 655.772154] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] Acquiring lock "e75156dc-4a26-4e48-9909-83fc837fc359" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 655.772345] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] Lock "e75156dc-4a26-4e48-9909-83fc837fc359" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 655.773359] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] Lock "e75156dc-4a26-4e48-9909-83fc837fc359" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 655.794183] env[62627]: INFO dbcounter [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] Registered counter for database nova_cell0
[ 655.802227] env[62627]: INFO dbcounter [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] Registered counter for database nova_cell1
[ 655.805549] env[62627]: DEBUG oslo_db.sqlalchemy.engines [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=62627) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:395}}
[ 655.805918] env[62627]: DEBUG oslo_db.sqlalchemy.engines [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=62627) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:395}}
[ 655.810784] env[62627]: ERROR nova.db.main.api [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 272, in main
[ 655.810784] env[62627]: result = function(*args, **kwargs)
[ 655.810784] env[62627]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 655.810784] env[62627]: return func(*args, **kwargs)
[ 655.810784] env[62627]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 655.810784] env[62627]: result = fn(*args, **kwargs)
[ 655.810784] env[62627]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 655.810784] env[62627]: return f(*args, **kwargs)
[ 655.810784] env[62627]: File "/opt/stack/nova/nova/objects/service.py", line 556, in _db_service_get_minimum_version
[ 655.810784] env[62627]: return db.service_get_minimum_version(context, binaries)
[ 655.810784] env[62627]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 655.810784] env[62627]: _check_db_access()
[ 655.810784] env[62627]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 655.810784] env[62627]: stacktrace = ''.join(traceback.format_stack())
[ 655.810784] env[62627]: 
[ 655.811605] env[62627]: ERROR nova.db.main.api [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 272, in main
[ 655.811605] env[62627]: result = function(*args, **kwargs)
[ 655.811605] env[62627]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 655.811605] env[62627]: return func(*args, **kwargs)
[ 655.811605] env[62627]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 655.811605] env[62627]: result = fn(*args, **kwargs)
[ 655.811605] env[62627]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 655.811605] env[62627]: return f(*args, **kwargs)
[ 655.811605] env[62627]: File "/opt/stack/nova/nova/objects/service.py", line 556, in _db_service_get_minimum_version
[ 655.811605] env[62627]: return db.service_get_minimum_version(context, binaries)
[ 655.811605] env[62627]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 655.811605] env[62627]: _check_db_access()
[ 655.811605] env[62627]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 655.811605] env[62627]: stacktrace = ''.join(traceback.format_stack())
[ 655.811605] env[62627]: 
[ 655.812024] env[62627]: WARNING nova.objects.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] Failed to get minimum service version for cell e75156dc-4a26-4e48-9909-83fc837fc359
[ 655.812158] env[62627]: WARNING nova.objects.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 655.812583] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] Acquiring lock "singleton_lock" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 655.812744] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] Acquired lock "singleton_lock" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 655.812988] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] Releasing lock "singleton_lock" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 655.813327] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] Full set of CONF: {{(pid=62627) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}}
[ 655.813473] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] ******************************************************************************** {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2804}}
[ 655.813603] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] Configuration options gathered from: {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2805}}
[ 655.813738] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}}
[ 655.813932] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}}
[ 655.814074] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] ================================================================================ {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}}
[ 655.814285] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] allow_resize_to_same_host = True {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 655.814455] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] arq_binding_timeout = 300 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 655.814587] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] backdoor_port = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 655.814713] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] backdoor_socket = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 655.814876] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] block_device_allocate_retries = 60 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 655.815061] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] block_device_allocate_retries_interval = 3 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 655.815238] env[62627]: DEBUG
oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cert = self.pem {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.815405] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.815574] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] compute_monitors = [] {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.815744] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] config_dir = [] {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.815915] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] config_drive_format = iso9660 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.816066] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.816238] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] config_source = [] {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.816406] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] console_host = devstack {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.816572] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] control_exchange = nova {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.816733] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cpu_allocation_ratio = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.816892] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] daemon = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.817071] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] debug = True {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.817231] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] default_access_ip_network_name = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.817396] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] default_availability_zone = nova {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.817550] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] default_ephemeral_format = 
None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.817706] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] default_green_pool_size = 1000 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.817944] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.818124] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] default_schedule_zone = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.818283] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] disk_allocation_ratio = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.818442] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] enable_new_services = True {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.818618] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] enabled_apis = ['osapi_compute'] {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.818781] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] enabled_ssl_apis = [] {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.818939] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] flat_injected = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.819113] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] force_config_drive = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.819274] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] force_raw_images = True {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.819444] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] graceful_shutdown_timeout = 5 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.819605] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] heal_instance_info_cache_interval = 60 {{(pid=62627) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.819837] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] host = cpu-1 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.820027] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] initial_cpu_allocation_ratio = 4.0 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.820198] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] initial_disk_allocation_ratio = 1.0 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.820359] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] initial_ram_allocation_ratio = 1.0 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.820571] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.820740] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] instance_build_timeout = 0 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.820901] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] instance_delete_interval = 300 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.821081] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] instance_format = [instance: %(uuid)s] {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.821249] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] instance_name_template = instance-%08x {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.821409] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] instance_usage_audit = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.821578] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] instance_usage_audit_period = month {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.821742] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.821913] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] instances_path = /opt/stack/data/nova/instances {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.822093] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] internal_service_availability_zone = internal {{(pid=62627) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.822254] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] key = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.822416] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] live_migration_retry_count = 30 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.822583] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] log_color = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.822747] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] log_config_append = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.822913] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.823086] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] log_dir = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.823247] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] log_file = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.823376] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] log_options = True {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.823535] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] log_rotate_interval = 1 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.823702] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] log_rotate_interval_type = days {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.823866] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] log_rotation_type = none {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.823996] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.824136] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.824303] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] 
logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.824464] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.824590] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.824753] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] long_rpc_timeout = 1800 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.824913] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] max_concurrent_builds = 10 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.825083] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] max_concurrent_live_migrations = 1 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.825243] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] max_concurrent_snapshots = 5 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.825400] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] max_local_block_devices = 3 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.825558] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] max_logfile_count = 30 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.825714] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] max_logfile_size_mb = 200 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.825871] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] maximum_instance_delete_attempts = 5 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.826050] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] metadata_listen = 0.0.0.0 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.826219] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] metadata_listen_port = 8775 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.826384] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] metadata_workers = 2 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.826542] env[62627]: DEBUG oslo_service.service 
[None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] migrate_max_retries = -1 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.826704] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] mkisofs_cmd = genisoimage {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.826907] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] my_block_storage_ip = 10.180.1.21 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.827068] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] my_ip = 10.180.1.21 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.827281] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] my_shared_fs_storage_ip = 10.180.1.21 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.827446] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] network_allocate_retries = 0 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.827635] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.827805] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] osapi_compute_listen = 0.0.0.0 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.827970] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] osapi_compute_listen_port = 8774 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.828157] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] osapi_compute_unique_server_name_scope = {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.828330] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] osapi_compute_workers = 2 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.828490] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] password_length = 12 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.828652] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] periodic_enable = True {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.828812] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] periodic_fuzzy_delay = 60 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.828978] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] pointer_model = usbtablet 
{{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.829162] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] preallocate_images = none {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.829322] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] publish_errors = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.829452] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] pybasedir = /opt/stack/nova {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.829608] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] ram_allocation_ratio = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.829766] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] rate_limit_burst = 0 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.829931] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] rate_limit_except_level = CRITICAL {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.830104] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] rate_limit_interval = 0 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.830270] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] reboot_timeout = 0 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.830428] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] reclaim_instance_interval = 0 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.830581] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] record = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.830747] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] reimage_timeout_per_gb = 60 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.830910] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] report_interval = 120 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.831083] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] rescue_timeout = 0 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.831247] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] reserved_host_cpus = 0 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.831404] env[62627]: DEBUG oslo_service.service [None 
req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] reserved_host_disk_mb = 0 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.831559] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] reserved_host_memory_mb = 512 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.831718] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] reserved_huge_pages = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.831883] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] resize_confirm_window = 0 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.832048] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] resize_fs_using_block_device = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.832210] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] resume_guests_state_on_host_boot = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.832377] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.832536] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] rpc_response_timeout = 60 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.832696] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] run_external_periodic_tasks = True {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.832862] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] running_deleted_instance_action = reap {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.833033] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] running_deleted_instance_poll_interval = 1800 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.833198] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] running_deleted_instance_timeout = 0 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.833357] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] scheduler_instance_sync_interval = 120 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.833522] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] service_down_time = 720 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.833687] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] 
servicegroup_driver = db {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.833842] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] shell_completion = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.834009] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] shelved_offload_time = 0 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.834181] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] shelved_poll_interval = 3600 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.834347] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] shutdown_timeout = 0 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.834507] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] source_is_ipv6 = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.834664] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] ssl_only = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.834909] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.835112] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] sync_power_state_interval = 600 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.835280] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] sync_power_state_pool_size = 1000 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.835448] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] syslog_log_facility = LOG_USER {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.835615] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] tempdir = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.835787] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] timeout_nbd = 10 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.835954] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] transport_url = **** {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.836132] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] update_resources_interval = 0 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.836294] env[62627]: DEBUG 
oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] use_cow_images = True {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.836450] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] use_journal = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.836607] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] use_json = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.836761] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] use_rootwrap_daemon = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.836915] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] use_stderr = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.837083] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] use_syslog = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.837237] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vcpu_pin_set = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.837404] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vif_plugging_is_fatal = True {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.837572] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vif_plugging_timeout = 300 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.837733] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] virt_mkfs = [] {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.837891] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] volume_usage_poll_interval = 0 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.838067] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] watch_log_file = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.838237] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] web = /usr/share/spice-html5 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 655.838417] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] os_brick.lock_path = /opt/stack/data/n-cpu-1 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.838581] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=62627) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.838741] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] os_brick.wait_mpath_device_interval = 1 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.838907] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_concurrency.disable_process_locking = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.839473] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.839669] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.839845] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.840038] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_messaging_metrics.metrics_process_name = {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.840219] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.840391] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.840576] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] api.auth_strategy = keystone {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.840745] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] api.compute_link_prefix = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.840924] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.841118] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] api.dhcp_domain = novalocal {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.841295] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] api.enable_instance_password = True {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.841461] 
env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] api.glance_link_prefix = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.841628] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.841800] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] api.instance_list_cells_batch_strategy = distributed {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.841969] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] api.instance_list_per_project_cells = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.842145] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] api.list_records_by_skipping_down_cells = True {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.842311] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] api.local_metadata_per_cell = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.842477] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] api.max_limit = 1000 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.842643] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] api.metadata_cache_expiration = 15 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.842816] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] api.neutron_default_tenant_id = default {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.842988] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] api.response_validation = warn {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.843176] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] api.use_neutron_default_nets = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.843346] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.843511] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] api.vendordata_dynamic_failure_fatal = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.843676] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=62627) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.843850] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] api.vendordata_dynamic_ssl_certfile = {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.844045] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] api.vendordata_dynamic_targets = [] {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.844219] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] api.vendordata_jsonfile_path = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.844402] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] api.vendordata_providers = ['StaticJSON'] {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.844598] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cache.backend = dogpile.cache.memcached {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.844767] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cache.backend_argument = **** {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.844932] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cache.backend_expiration_time = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.845126] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cache.config_prefix = cache.oslo {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.845301] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cache.dead_timeout = 60.0 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.845465] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cache.debug_cache_backend = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.845626] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cache.enable_retry_client = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.845785] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cache.enable_socket_keepalive = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.845954] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cache.enabled = True {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.846132] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cache.enforce_fips_mode = False {{(pid=62627) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.846297] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cache.expiration_time = 600 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.846460] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cache.hashclient_retry_attempts = 2 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.846625] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cache.hashclient_retry_delay = 1.0 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.846786] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cache.memcache_dead_retry = 300 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.846943] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cache.memcache_password = **** {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.847124] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.847289] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.847449] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cache.memcache_pool_maxsize = 10 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.847616] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cache.memcache_pool_unused_timeout = 60 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.847766] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cache.memcache_sasl_enabled = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.847942] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cache.memcache_servers = ['localhost:11211'] {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.848127] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cache.memcache_socket_timeout = 1.0 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.848288] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cache.memcache_username = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.848450] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cache.proxies = [] {{(pid=62627) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.848610] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cache.redis_db = 0 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.848767] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cache.redis_password = **** {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.848935] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cache.redis_sentinel_service_name = mymaster {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.849127] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.849299] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cache.redis_server = localhost:6379 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.849462] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cache.redis_socket_timeout = 1.0 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.849620] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cache.redis_username = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.849782] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cache.retry_attempts = 2 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.849949] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cache.retry_delay = 0.0 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.850128] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cache.socket_keepalive_count = 1 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.850293] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cache.socket_keepalive_idle = 1 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.850454] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cache.socket_keepalive_interval = 1 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.850610] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cache.tls_allowed_ciphers = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.850767] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cache.tls_cafile = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.850923] 
env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cache.tls_certfile = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.851098] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cache.tls_enabled = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.851259] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cache.tls_keyfile = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.851428] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cinder.auth_section = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.851601] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cinder.auth_type = password {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.851763] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cinder.cafile = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.851943] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cinder.catalog_info = volumev3::publicURL {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.852121] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cinder.certfile = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.852289] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cinder.collect_timing = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.852451] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cinder.cross_az_attach = True {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.852613] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cinder.debug = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.852774] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cinder.endpoint_template = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.852938] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cinder.http_retries = 3 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.853116] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cinder.insecure = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.853277] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cinder.keyfile = None {{(pid=62627) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.853448] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cinder.os_region_name = RegionOne {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.853611] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cinder.split_loggers = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.853769] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cinder.timeout = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.853941] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.854115] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] compute.cpu_dedicated_set = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.854276] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] compute.cpu_shared_set = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.854441] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] compute.image_type_exclude_list = [] {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.854603] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] compute.live_migration_wait_for_vif_plug = True {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.854765] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] compute.max_concurrent_disk_ops = 0 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.854926] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] compute.max_disk_devices_to_attach = -1 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.855104] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.855272] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.855438] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] compute.resource_provider_association_refresh = 300 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.855600] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] 
compute.sharing_providers_max_uuids_per_request = 200 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.855762] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] compute.shutdown_retry_interval = 10 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.855942] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.856139] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] conductor.workers = 2 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.856319] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] console.allowed_origins = [] {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.856481] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] console.ssl_ciphers = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.856651] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] console.ssl_minimum_version = default {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.856821] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] consoleauth.enforce_session_timeout = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.856988] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] consoleauth.token_ttl = 600 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.857175] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cyborg.cafile = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.857334] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cyborg.certfile = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.857495] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cyborg.collect_timing = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.857660] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cyborg.connect_retries = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.857809] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cyborg.connect_retry_delay = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.857963] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cyborg.endpoint_override = None 
{{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.858139] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cyborg.insecure = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.858297] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cyborg.keyfile = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.858456] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cyborg.max_version = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.858610] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cyborg.min_version = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.858765] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cyborg.region_name = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.858920] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cyborg.retriable_status_codes = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.859094] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cyborg.service_name = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.859267] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cyborg.service_type = accelerator {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.859431] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cyborg.split_loggers = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.859587] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cyborg.status_code_retries = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.859743] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cyborg.status_code_retry_delay = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.859898] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cyborg.timeout = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.860088] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.860249] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] cyborg.version = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
655.860417] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] database.asyncio_connection = **** {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.860575] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] database.asyncio_slave_connection = **** {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.860747] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] database.backend = sqlalchemy {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.860915] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] database.connection = **** {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.861093] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] database.connection_debug = 0 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.861264] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] database.connection_parameters = {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.861428] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] database.connection_recycle_time = 3600 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.861588] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] database.connection_trace = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.861749] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] database.db_inc_retry_interval = True {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.861916] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] database.db_max_retries = 20 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.862089] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] database.db_max_retry_interval = 10 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.862254] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] database.db_retry_interval = 1 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.862415] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] database.max_overflow = 50 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.862575] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] database.max_pool_size = 5 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.862734] env[62627]: DEBUG oslo_service.service [None 
req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] database.max_retries = 10 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.862904] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] database.mysql_sql_mode = TRADITIONAL {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.863077] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] database.mysql_wsrep_sync_wait = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.863241] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] database.pool_timeout = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.863404] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] database.retry_interval = 10 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.863563] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] database.slave_connection = **** {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.863725] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] database.sqlite_synchronous = True {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.863886] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] database.use_db_reconnect = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.864066] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] api_database.asyncio_connection = **** {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.864229] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] api_database.asyncio_slave_connection = **** {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.864396] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] api_database.backend = sqlalchemy {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.864562] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] api_database.connection = **** {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.864726] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] api_database.connection_debug = 0 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.864894] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] api_database.connection_parameters = {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.865069] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None 
None] api_database.connection_recycle_time = 3600 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.865233] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] api_database.connection_trace = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.865394] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] api_database.db_inc_retry_interval = True {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.865555] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] api_database.db_max_retries = 20 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.865715] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] api_database.db_max_retry_interval = 10 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.865876] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] api_database.db_retry_interval = 1 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.866049] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] api_database.max_overflow = 50 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.866231] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] api_database.max_pool_size = 5 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.866378] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] api_database.max_retries = 10 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.866543] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.866700] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] api_database.mysql_wsrep_sync_wait = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.866855] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] api_database.pool_timeout = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.867029] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] api_database.retry_interval = 10 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.867192] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] api_database.slave_connection = **** {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.867354] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] 
api_database.sqlite_synchronous = True {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.867527] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] devices.enabled_mdev_types = [] {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.867703] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.867874] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] ephemeral_storage_encryption.default_format = luks {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.868049] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] ephemeral_storage_encryption.enabled = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.868219] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] ephemeral_storage_encryption.key_size = 512 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.868389] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] glance.api_servers = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.868552] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] glance.cafile = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.868711] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] glance.certfile = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.868874] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] glance.collect_timing = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.869046] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] glance.connect_retries = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.869205] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] glance.connect_retry_delay = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.869364] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] glance.debug = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.869530] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] glance.default_trusted_certificate_ids = [] {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.869690] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] glance.enable_certificate_validation 
= False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.869866] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] glance.enable_rbd_download = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.870014] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] glance.endpoint_override = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.870183] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] glance.insecure = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.870348] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] glance.keyfile = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.870504] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] glance.max_version = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.870658] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] glance.min_version = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.870818] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] glance.num_retries = 3 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.870984] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] glance.rbd_ceph_conf = {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.871162] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] glance.rbd_connect_timeout = 5 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.871328] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] glance.rbd_pool = {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.871491] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] glance.rbd_user = {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.871656] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] glance.region_name = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.871814] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] glance.retriable_status_codes = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.871981] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] glance.service_name = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.872160] env[62627]: DEBUG 
oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] glance.service_type = image {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.872322] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] glance.split_loggers = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.872475] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] glance.status_code_retries = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.872632] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] glance.status_code_retry_delay = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.872789] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] glance.timeout = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.872969] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.873149] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] glance.verify_glance_signatures = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.873310] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] glance.version = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.873475] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] guestfs.debug = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.873644] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] manila.auth_section = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.873804] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] manila.auth_type = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.873961] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] manila.cafile = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.874133] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] manila.certfile = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.874296] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] manila.collect_timing = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.874451] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] manila.connect_retries = None 
{{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.874604] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] manila.connect_retry_delay = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.874759] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] manila.endpoint_override = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.874919] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] manila.insecure = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.875088] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] manila.keyfile = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.875250] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] manila.max_version = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.875405] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] manila.min_version = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.875558] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] manila.region_name = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.875718] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] manila.retriable_status_codes = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.875874] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] manila.service_name = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.876057] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] manila.service_type = shared-file-system {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.876230] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] manila.share_apply_policy_timeout = 10 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.876393] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] manila.split_loggers = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.876555] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] manila.status_code_retries = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.876713] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] manila.status_code_retry_delay = None {{(pid=62627) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.876871] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] manila.timeout = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.877085] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] manila.valid_interfaces = ['internal', 'public'] {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.877257] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] manila.version = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.877425] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] mks.enabled = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.877771] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.877961] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] image_cache.manager_interval = 2400 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.878149] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] image_cache.precache_concurrency = 1 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.878321] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] image_cache.remove_unused_base_images = True {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.878490] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.878659] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.878834] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] image_cache.subdirectory_name = _base {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.879018] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] ironic.api_max_retries = 60 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.879183] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] ironic.api_retry_interval = 2 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.879345] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] ironic.auth_section = None {{(pid=62627) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.879509] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] ironic.auth_type = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.879668] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] ironic.cafile = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.879824] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] ironic.certfile = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.879986] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] ironic.collect_timing = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.880166] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] ironic.conductor_group = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.880327] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] ironic.connect_retries = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.880490] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] ironic.connect_retry_delay = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.880648] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] ironic.endpoint_override = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.880810] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] ironic.insecure = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.880966] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] ironic.keyfile = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.881136] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] ironic.max_version = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.881294] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] ironic.min_version = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.881460] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] ironic.peer_list = [] {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.881615] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] ironic.region_name = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.881773] env[62627]: DEBUG oslo_service.service [None 
req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] ironic.retriable_status_codes = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.881944] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] ironic.serial_console_state_timeout = 10 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.882112] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] ironic.service_name = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.882284] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] ironic.service_type = baremetal {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.882444] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] ironic.shard = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.882608] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] ironic.split_loggers = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.882767] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] ironic.status_code_retries = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.882966] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] ironic.status_code_retry_delay = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.883101] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] ironic.timeout = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.883287] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.883449] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] ironic.version = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.883633] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.883806] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] key_manager.fixed_key = **** {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.883988] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.884167] env[62627]: DEBUG oslo_service.service [None 
req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] barbican.barbican_api_version = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.884327] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] barbican.barbican_endpoint = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.884497] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] barbican.barbican_endpoint_type = public {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.884656] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] barbican.barbican_region_name = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.884812] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] barbican.cafile = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.884972] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] barbican.certfile = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.885149] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] barbican.collect_timing = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.885310] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] barbican.insecure = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.885467] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] barbican.keyfile = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.885629] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] barbican.number_of_retries = 60 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.885793] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] barbican.retry_delay = 1 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.885955] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] barbican.send_service_user_token = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.886135] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] barbican.split_loggers = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.886295] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] barbican.timeout = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.886457] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] barbican.verify_ssl = True {{(pid=62627) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.886614] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] barbican.verify_ssl_path = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.886779] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] barbican_service_user.auth_section = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.886943] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] barbican_service_user.auth_type = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.887114] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] barbican_service_user.cafile = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.887274] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] barbican_service_user.certfile = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.887437] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] barbican_service_user.collect_timing = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.887596] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] barbican_service_user.insecure = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.887752] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] barbican_service_user.keyfile = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.887910] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] barbican_service_user.split_loggers = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.888079] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] barbican_service_user.timeout = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.888250] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vault.approle_role_id = **** {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.888405] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vault.approle_secret_id = **** {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.888572] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vault.kv_mountpoint = secret {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.888729] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vault.kv_path = None {{(pid=62627) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.888892] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vault.kv_version = 2 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.889071] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vault.namespace = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.889234] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vault.root_token_id = **** {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.889395] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vault.ssl_ca_crt_file = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.889561] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vault.timeout = 60.0 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.889723] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vault.use_ssl = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.889890] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.890070] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] keystone.cafile = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.890232] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] keystone.certfile = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.890394] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] keystone.collect_timing = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.890550] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] keystone.connect_retries = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.890710] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] keystone.connect_retry_delay = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.890866] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] keystone.endpoint_override = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.891038] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] keystone.insecure = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.891197] env[62627]: DEBUG oslo_service.service [None 
req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] keystone.keyfile = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.891351] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] keystone.max_version = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.891504] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] keystone.min_version = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.891656] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] keystone.region_name = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.891814] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] keystone.retriable_status_codes = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.892266] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] keystone.service_name = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.892266] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] keystone.service_type = identity {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.892365] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] keystone.split_loggers = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.892467] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] keystone.status_code_retries = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.892626] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] keystone.status_code_retry_delay = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.892781] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] keystone.timeout = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.892961] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.893134] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] keystone.version = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.893327] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.ceph_mount_options = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.893618] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.ceph_mount_point_base 
= /opt/stack/data/n-cpu-1/mnt {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.893802] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.connection_uri = {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.893965] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.cpu_mode = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.894150] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.cpu_model_extra_flags = [] {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.894324] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.cpu_models = [] {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.894499] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.cpu_power_governor_high = performance {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.894670] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.cpu_power_governor_low = powersave {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.894834] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.cpu_power_management = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.895018] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.895201] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.device_detach_attempts = 8 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.895369] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.device_detach_timeout = 20 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.895539] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.disk_cachemodes = [] {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.895701] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.disk_prefix = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.895868] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.enabled_perf_events = [] {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.896047] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.file_backed_memory = 0 {{(pid=62627) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.896219] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.gid_maps = [] {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.896378] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.hw_disk_discard = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.896536] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.hw_machine_type = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.896703] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.images_rbd_ceph_conf = {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.896870] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.897046] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.897220] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.images_rbd_glance_store_name = {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.897389] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.images_rbd_pool = rbd {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.897559] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.images_type = default {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.897718] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.images_volume_group = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.897880] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.inject_key = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.898056] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.inject_partition = -2 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.898222] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.inject_password = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.898382] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.iscsi_iface = None {{(pid=62627) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.898543] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.iser_use_multipath = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.898706] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.live_migration_bandwidth = 0 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.898870] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.live_migration_completion_timeout = 800 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.899053] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.live_migration_downtime = 500 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.899221] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.live_migration_downtime_delay = 75 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.899382] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.live_migration_downtime_steps = 10 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.899541] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.live_migration_inbound_addr = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.899701] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.live_migration_permit_auto_converge = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.899861] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.live_migration_permit_post_copy = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.900030] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.live_migration_scheme = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.900206] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.live_migration_timeout_action = abort {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.900368] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.live_migration_tunnelled = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.900526] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.live_migration_uri = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.900688] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] 
libvirt.live_migration_with_native_tls = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.900843] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.max_queues = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.901014] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.mem_stats_period_seconds = 10 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.901262] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.901430] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.nfs_mount_options = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.901717] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.901912] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.num_aoe_discover_tries = 3 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.902077] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.num_iser_scan_tries = 5 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.902242] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.num_memory_encrypted_guests = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.902406] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.num_nvme_discover_tries = 5 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.902568] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.num_pcie_ports = 0 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.902733] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.num_volume_scan_tries = 5 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.902898] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.pmem_namespaces = [] {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.903068] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.quobyte_client_cfg = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.903363] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] 
libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.903546] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.rbd_connect_timeout = 5 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.903719] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.903886] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.904061] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.rbd_secret_uuid = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.904224] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.rbd_user = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.904388] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.realtime_scheduler_priority = 1 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.904560] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.remote_filesystem_transport = ssh {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.904718] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.rescue_image_id = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.904875] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.rescue_kernel_id = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.905042] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.rescue_ramdisk_id = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.905217] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.rng_dev_path = /dev/urandom {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.905378] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.rx_queue_size = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.905549] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.smbfs_mount_options = {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.905840] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] 
libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.906030] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.snapshot_compression = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.906201] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.snapshot_image_format = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.906439] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.906609] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.sparse_logical_volumes = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.906769] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.swtpm_enabled = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.906936] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.swtpm_group = tss {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.907121] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.swtpm_user = tss {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.907293] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.sysinfo_serial = unique {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.907451] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.tb_cache_size = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.907606] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.tx_queue_size = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.907772] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.uid_maps = [] {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.907933] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.use_virtio_for_bridges = True {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.908117] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.virt_type = kvm {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.908288] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.volume_clear = zero 
{{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.908453] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.volume_clear_size = 0 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.908618] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.volume_use_multipath = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.908776] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.vzstorage_cache_path = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.908943] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.909127] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.vzstorage_mount_group = qemu {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.909295] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.vzstorage_mount_opts = [] {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.909464] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.909744] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.909928] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.vzstorage_mount_user = stack {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.910113] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.910293] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] neutron.auth_section = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.910480] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] neutron.auth_type = password {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.910645] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] neutron.cafile = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.910807] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] neutron.certfile = None 
{{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.910974] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] neutron.collect_timing = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.911151] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] neutron.connect_retries = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.911312] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] neutron.connect_retry_delay = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.911483] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] neutron.default_floating_pool = public {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.911641] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] neutron.endpoint_override = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.911803] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] neutron.extension_sync_interval = 600 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.911968] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] neutron.http_retries = 3 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.912146] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] neutron.insecure = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.912307] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] neutron.keyfile = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.912462] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] neutron.max_version = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.912636] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] neutron.metadata_proxy_shared_secret = **** {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.912796] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] neutron.min_version = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.912963] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] neutron.ovs_bridge = br-int {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.913145] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] neutron.physnets = [] {{(pid=62627) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.913315] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] neutron.region_name = RegionOne {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.913475] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] neutron.retriable_status_codes = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.913643] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] neutron.service_metadata_proxy = True {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.913802] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] neutron.service_name = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.913970] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] neutron.service_type = network {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.914149] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] neutron.split_loggers = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.914310] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] neutron.status_code_retries = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.914467] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] neutron.status_code_retry_delay = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.914622] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] neutron.timeout = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.914803] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.914964] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] neutron.version = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.915154] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] notifications.bdms_in_notifications = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.915333] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] notifications.default_level = INFO {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.915498] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] notifications.include_share_mapping = False {{(pid=62627) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.915673] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] notifications.notification_format = unversioned {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.915836] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] notifications.notify_on_state_change = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.916022] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.916208] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] pci.alias = [] {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.916379] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] pci.device_spec = [] {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.916544] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] pci.report_in_placement = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.916714] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] placement.auth_section = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.916884] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] placement.auth_type = password {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.917064] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] placement.auth_url = http://10.180.1.21/identity {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.917226] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] placement.cafile = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.917381] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] placement.certfile = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.917542] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] placement.collect_timing = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.917697] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] placement.connect_retries = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.917852] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] placement.connect_retry_delay = None {{(pid=62627) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.918014] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] placement.default_domain_id = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.918176] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] placement.default_domain_name = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.918329] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] placement.domain_id = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.918483] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] placement.domain_name = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.918635] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] placement.endpoint_override = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.918791] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] placement.insecure = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.918945] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] placement.keyfile = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.919122] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] placement.max_version = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.919278] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] placement.min_version = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.919443] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] placement.password = **** {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.919598] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] placement.project_domain_id = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.919760] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] placement.project_domain_name = Default {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.919923] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] placement.project_id = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.920107] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] placement.project_name = service {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.920277] 
env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] placement.region_name = RegionOne {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.920438] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] placement.retriable_status_codes = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.920596] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] placement.service_name = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.920762] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] placement.service_type = placement {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.920926] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] placement.split_loggers = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.921101] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] placement.status_code_retries = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.921280] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] placement.status_code_retry_delay = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.921433] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] placement.system_scope = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.921591] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] placement.timeout = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.921745] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] placement.trust_id = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.921906] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] placement.user_domain_id = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.922086] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] placement.user_domain_name = Default {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.922248] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] placement.user_id = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.922418] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] placement.username = nova {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.922597] env[62627]: DEBUG oslo_service.service [None 
req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.922755] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] placement.version = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.922934] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] quota.cores = 20 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.923114] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] quota.count_usage_from_placement = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.923286] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.923452] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] quota.injected_file_content_bytes = 10240 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.923614] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] quota.injected_file_path_length = 255 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.923775] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] quota.injected_files = 5 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.923937] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] quota.instances = 10 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.924112] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] quota.key_pairs = 100 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.924276] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] quota.metadata_items = 128 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.924437] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] quota.ram = 51200 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.924597] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] quota.recheck_quota = True {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.924761] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] quota.server_group_members = 10 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.924924] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] quota.server_groups = 10 {{(pid=62627) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.925147] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] quota.unified_limits_resource_list = ['servers'] {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.925326] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] quota.unified_limits_resource_strategy = require {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.925508] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.925674] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.925838] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] scheduler.image_metadata_prefilter = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.926006] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.926183] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] scheduler.max_attempts = 3 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.926346] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] scheduler.max_placement_results = 1000 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.926508] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.926668] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] scheduler.query_placement_for_image_type_support = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.926831] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.927015] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] scheduler.workers = 2 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.927201] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.927374] 
env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] filter_scheduler.aggregate_image_properties_isolation_separator = . {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.927553] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.927725] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.927892] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.928072] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.928244] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.928435] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.928605] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] filter_scheduler.host_subset_size = 1 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.928771] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.928931] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] filter_scheduler.image_properties_default_architecture = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.929110] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.929280] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] filter_scheduler.isolated_hosts = [] {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.929442] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] filter_scheduler.isolated_images = [] 
{{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.929620] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] filter_scheduler.max_instances_per_host = 50 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.929781] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.929945] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.930118] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] filter_scheduler.pci_in_placement = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.930284] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.930447] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.930607] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.930766] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.930927] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.931101] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.931268] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] filter_scheduler.track_instance_changes = True {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.931445] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.931613] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] metrics.required = True {{(pid=62627) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.931774] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] metrics.weight_multiplier = 1.0 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.931940] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] metrics.weight_of_unavailable = -10000.0 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.932118] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] metrics.weight_setting = [] {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.932431] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.932610] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] serial_console.enabled = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.932789] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] serial_console.port_range = 10000:20000 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.932962] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.933148] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.933319] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] serial_console.serialproxy_port = 6083 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.933489] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] service_user.auth_section = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.933662] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] service_user.auth_type = password {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.933822] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] service_user.cafile = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.933980] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] service_user.certfile = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.934159] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] service_user.collect_timing = False {{(pid=62627) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.934323] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] service_user.insecure = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.934479] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] service_user.keyfile = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.934648] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] service_user.send_service_user_token = True {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.934811] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] service_user.split_loggers = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.934969] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] service_user.timeout = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.935156] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] spice.agent_enabled = True {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.935321] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] spice.enabled = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.935632] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.935848] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] spice.html5proxy_host = 0.0.0.0 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.936044] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] spice.html5proxy_port = 6082 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.936214] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] spice.image_compression = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.936376] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] spice.jpeg_compression = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.936536] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] spice.playback_compression = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.936697] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] spice.require_secure = False {{(pid=62627) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.936866] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] spice.server_listen = 127.0.0.1 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.937050] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.937214] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] spice.streaming_mode = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.937371] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] spice.zlib_compression = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.937536] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] upgrade_levels.baseapi = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.937706] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] upgrade_levels.compute = auto {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.937864] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] upgrade_levels.conductor = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.938038] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] upgrade_levels.scheduler = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.938213] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vendordata_dynamic_auth.auth_section = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.938376] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vendordata_dynamic_auth.auth_type = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.938534] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vendordata_dynamic_auth.cafile = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.938692] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vendordata_dynamic_auth.certfile = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.938856] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vendordata_dynamic_auth.collect_timing = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.939037] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vendordata_dynamic_auth.insecure = False {{(pid=62627) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.939201] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vendordata_dynamic_auth.keyfile = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.939365] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vendordata_dynamic_auth.split_loggers = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.939522] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vendordata_dynamic_auth.timeout = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.939695] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vmware.api_retry_count = 10 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.939856] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vmware.ca_file = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.940038] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vmware.cache_prefix = devstack-image-cache {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.940215] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vmware.cluster_name = testcl1 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.940383] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vmware.connection_pool_size = 10 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.940543] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vmware.console_delay_seconds = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.940713] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vmware.datastore_regex = ^datastore.* {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.940921] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.941110] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vmware.host_password = **** {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.941283] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vmware.host_port = 443 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.941451] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vmware.host_username = administrator@vsphere.local {{(pid=62627) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.941618] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vmware.insecure = True {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.941778] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vmware.integration_bridge = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.941952] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vmware.maximum_objects = 100 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.942119] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vmware.pbm_default_policy = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.942284] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vmware.pbm_enabled = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.942441] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vmware.pbm_wsdl_location = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.942610] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.942769] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vmware.serial_port_proxy_uri = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.942928] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vmware.serial_port_service_uri = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.943111] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vmware.task_poll_interval = 0.5 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.943287] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vmware.use_linked_clone = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.943454] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vmware.vnc_keymap = en-us {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.943621] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vmware.vnc_port = 5900 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.943787] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vmware.vnc_port_total = 10000 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.943975] 
env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vnc.auth_schemes = ['none'] {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.944166] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vnc.enabled = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.944464] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.944650] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.944824] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vnc.novncproxy_port = 6080 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.945033] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vnc.server_listen = 127.0.0.1 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.945224] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.945385] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vnc.vencrypt_ca_certs = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.945540] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vnc.vencrypt_client_cert = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.945695] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vnc.vencrypt_client_key = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.945868] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.946052] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] workarounds.disable_deep_image_inspection = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.946219] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] workarounds.disable_fallback_pcpu_query = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.946383] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] workarounds.disable_group_policy_check_upcall = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
655.946541] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.946702] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] workarounds.disable_rootwrap = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.946863] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] workarounds.enable_numa_live_migration = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.947036] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.947202] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.947365] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] workarounds.handle_virt_lifecycle_events = True {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.947527] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] workarounds.libvirt_disable_apic = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.947686] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] workarounds.never_download_image_if_on_rbd = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.947848] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.948021] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.948189] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.948352] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.948511] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.948670] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None 
None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.948827] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.948984] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.949164] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.949350] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.949520] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] wsgi.client_socket_timeout = 900 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.949685] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] wsgi.default_pool_size = 1000 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.949868] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] wsgi.keep_alive = True {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.950040] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] wsgi.max_header_line = 16384 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.950208] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] wsgi.secure_proxy_ssl_header = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.950369] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] wsgi.ssl_ca_file = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.950530] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] wsgi.ssl_cert_file = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.950688] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] wsgi.ssl_key_file = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.950855] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] wsgi.tcp_keepidle = 600 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.951041] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] 
wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.951214] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] zvm.ca_file = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.951374] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] zvm.cloud_connector_url = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.951669] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.951854] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] zvm.reachable_timeout = 300 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.952028] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.952215] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.952397] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] profiler.connection_string = messaging:// {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.952565] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] profiler.enabled = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.952735] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] profiler.es_doc_type = notification {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.952898] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] profiler.es_scroll_size = 10000 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.953476] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] profiler.es_scroll_time = 2m {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.953476] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] profiler.filter_error_trace = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.953476] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] profiler.hmac_keys = **** {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.953619] env[62627]: DEBUG 
oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] profiler.sentinel_service_name = mymaster {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.953720] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] profiler.socket_timeout = 0.1 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.953881] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] profiler.trace_requests = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.954055] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] profiler.trace_sqlalchemy = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.954233] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] profiler_jaeger.process_tags = {} {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.954394] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] profiler_jaeger.service_name_prefix = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.954556] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] profiler_otlp.service_name_prefix = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.954723] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] remote_debug.host = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.954882] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] remote_debug.port = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.955079] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.955249] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.955413] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.955574] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.955737] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.955896] 
env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.956071] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.956236] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.956397] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.956567] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_messaging_rabbit.hostname = devstack {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.956725] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.956894] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.957080] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.957249] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.957419] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.957583] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.957746] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.957912] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.958088] env[62627]: DEBUG oslo_service.service [None 
req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.958253] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.958418] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.958582] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.958743] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.958902] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.959075] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.959238] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.959399] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.959559] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.959720] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.959881] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_messaging_rabbit.ssl = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.960061] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.960233] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_messaging_rabbit.ssl_cert_file = 
{{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.960399] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.960565] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.960732] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_messaging_rabbit.ssl_version = {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.960894] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.961104] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.961274] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_messaging_notifications.retry = -1 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.961451] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.961621] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_messaging_notifications.transport_url = **** {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.961799] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_limit.auth_section = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.961966] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_limit.auth_type = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.962138] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_limit.cafile = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.962301] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_limit.certfile = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.962465] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_limit.collect_timing = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.962623] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] 
oslo_limit.connect_retries = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.962782] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_limit.connect_retry_delay = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.962938] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_limit.endpoint_id = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.963119] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_limit.endpoint_interface = publicURL {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.963276] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_limit.endpoint_override = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.963430] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_limit.endpoint_region_name = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.963584] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_limit.endpoint_service_name = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.963738] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_limit.endpoint_service_type = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.963896] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_limit.insecure = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.964061] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_limit.keyfile = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.964218] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_limit.max_version = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.964372] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_limit.min_version = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.964531] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_limit.region_name = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.964676] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_limit.retriable_status_codes = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.964825] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_limit.service_name = None {{(pid=62627) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.964975] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_limit.service_type = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.965143] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_limit.split_loggers = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.965293] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_limit.status_code_retries = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.965443] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_limit.status_code_retry_delay = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.965595] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_limit.timeout = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.965749] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_limit.valid_interfaces = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.965903] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_limit.version = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.966078] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_reports.file_event_handler = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.966245] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_reports.file_event_handler_interval = 1 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.966401] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] oslo_reports.log_dir = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.966570] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.966727] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vif_plug_linux_bridge_privileged.group = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.966884] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.967063] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vif_plug_linux_bridge_privileged.logger_name = 
oslo_privsep.daemon {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.967231] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.967390] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vif_plug_linux_bridge_privileged.user = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.967560] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.967721] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vif_plug_ovs_privileged.group = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.967879] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vif_plug_ovs_privileged.helper_command = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.968059] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.968229] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.968388] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] vif_plug_ovs_privileged.user = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.968557] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] os_vif_linux_bridge.flat_interface = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.968735] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.968908] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.969100] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.969278] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.969449] 
env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.969618] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.969783] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] os_vif_linux_bridge.vlan_interface = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.969959] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.970145] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] os_vif_ovs.isolate_vif = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.970312] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.970476] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.970648] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.970815] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] os_vif_ovs.ovsdb_interface = native {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.970977] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] os_vif_ovs.per_port_bridge = False {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.971160] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] privsep_osbrick.capabilities = [21] {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.971317] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] privsep_osbrick.group = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.971472] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] privsep_osbrick.helper_command = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.971636] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
655.971799] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] privsep_osbrick.thread_pool_size = 8 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.971962] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] privsep_osbrick.user = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.972152] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.972314] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] nova_sys_admin.group = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.972472] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] nova_sys_admin.helper_command = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.972634] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.972795] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] nova_sys_admin.thread_pool_size = 8 {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.972953] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] nova_sys_admin.user = None {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 655.973097] env[62627]: DEBUG oslo_service.service [None req-a405ca85-b3d5-4e53-8754-1069a5e28096 None None] ******************************************************************************** {{(pid=62627) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2828}} [ 655.973513] env[62627]: INFO nova.service [-] Starting compute node (version 0.1.0) [ 656.477130] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d38c36d3-d39f-410d-9c9c-cd59d1571f8e None None] Getting list of instances from cluster (obj){ [ 656.477130] env[62627]: value = "domain-c8" [ 656.477130] env[62627]: _type = "ClusterComputeResource" [ 656.477130] env[62627]: } {{(pid=62627) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 656.478233] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-422b01fc-41c1-4202-8d11-a0fe9ddb0132 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.487239] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d38c36d3-d39f-410d-9c9c-cd59d1571f8e None None] Got total of 0 instances {{(pid=62627) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 656.487887] env[62627]: WARNING nova.virt.vmwareapi.driver [None req-d38c36d3-d39f-410d-9c9c-cd59d1571f8e None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. 
It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 656.488438] env[62627]: INFO nova.virt.node [None req-d38c36d3-d39f-410d-9c9c-cd59d1571f8e None None] Generated node identity c17e7f32-68d8-4e2f-8979-15d701de02f7 [ 656.488730] env[62627]: INFO nova.virt.node [None req-d38c36d3-d39f-410d-9c9c-cd59d1571f8e None None] Wrote node identity c17e7f32-68d8-4e2f-8979-15d701de02f7 to /opt/stack/data/n-cpu-1/compute_id [ 656.991465] env[62627]: WARNING nova.compute.manager [None req-d38c36d3-d39f-410d-9c9c-cd59d1571f8e None None] Compute nodes ['c17e7f32-68d8-4e2f-8979-15d701de02f7'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 657.997666] env[62627]: INFO nova.compute.manager [None req-d38c36d3-d39f-410d-9c9c-cd59d1571f8e None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 659.002565] env[62627]: WARNING nova.compute.manager [None req-d38c36d3-d39f-410d-9c9c-cd59d1571f8e None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. [ 659.002895] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d38c36d3-d39f-410d-9c9c-cd59d1571f8e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 659.003095] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d38c36d3-d39f-410d-9c9c-cd59d1571f8e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 659.003253] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d38c36d3-d39f-410d-9c9c-cd59d1571f8e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 659.003406] env[62627]: DEBUG nova.compute.resource_tracker [None req-d38c36d3-d39f-410d-9c9c-cd59d1571f8e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62627) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 659.004338] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1f495f8-eec3-4c4a-b26f-34a95779e9b9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.012470] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae41d4de-c110-4db9-a032-376e19c54365 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.027127] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5698a5a-638a-4a0c-afdf-8d66d27acd95 {{(pid=62627) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.033246] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-272b98ce-08f2-4629-a175-33af7378dc5a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.061326] env[62627]: DEBUG nova.compute.resource_tracker [None req-d38c36d3-d39f-410d-9c9c-cd59d1571f8e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181235MB free_disk=178GB free_vcpus=48 pci_devices=None {{(pid=62627) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 659.061471] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d38c36d3-d39f-410d-9c9c-cd59d1571f8e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 659.061635] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d38c36d3-d39f-410d-9c9c-cd59d1571f8e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 659.563627] env[62627]: WARNING nova.compute.resource_tracker [None req-d38c36d3-d39f-410d-9c9c-cd59d1571f8e None None] No compute node record for cpu-1:c17e7f32-68d8-4e2f-8979-15d701de02f7: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host c17e7f32-68d8-4e2f-8979-15d701de02f7 could not be found. [ 660.066867] env[62627]: INFO nova.compute.resource_tracker [None req-d38c36d3-d39f-410d-9c9c-cd59d1571f8e None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: c17e7f32-68d8-4e2f-8979-15d701de02f7 [ 661.575744] env[62627]: DEBUG nova.compute.resource_tracker [None req-d38c36d3-d39f-410d-9c9c-cd59d1571f8e None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62627) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 661.576189] env[62627]: DEBUG nova.compute.resource_tracker [None req-d38c36d3-d39f-410d-9c9c-cd59d1571f8e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62627) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 661.735019] env[62627]: INFO nova.scheduler.client.report [None req-d38c36d3-d39f-410d-9c9c-cd59d1571f8e None None] [req-1ac7b193-f656-4cb7-a256-3e3df7678f0e] Created resource provider record via placement API for resource provider with UUID c17e7f32-68d8-4e2f-8979-15d701de02f7 and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. 
[ 661.752275] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f028b708-ec39-4e5e-9aca-0e653a42de1b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.760170] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35533ad7-c6e3-485b-80dd-9aea911be9ec {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.790421] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d5f10cf-40d7-4dc0-8f02-596e07e44c3b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.797869] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42b575aa-d724-450d-8063-66a8cc0ec6d5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.810953] env[62627]: DEBUG nova.compute.provider_tree [None req-d38c36d3-d39f-410d-9c9c-cd59d1571f8e None None] Updating inventory in ProviderTree for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 662.346160] env[62627]: DEBUG nova.scheduler.client.report [None req-d38c36d3-d39f-410d-9c9c-cd59d1571f8e None None] Updated inventory for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 662.346391] env[62627]: DEBUG nova.compute.provider_tree [None req-d38c36d3-d39f-410d-9c9c-cd59d1571f8e None None] Updating resource provider c17e7f32-68d8-4e2f-8979-15d701de02f7 generation from 0 to 1 during operation: update_inventory {{(pid=62627) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 662.346532] env[62627]: DEBUG nova.compute.provider_tree [None req-d38c36d3-d39f-410d-9c9c-cd59d1571f8e None None] Updating inventory in ProviderTree for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 662.394113] env[62627]: DEBUG nova.compute.provider_tree [None req-d38c36d3-d39f-410d-9c9c-cd59d1571f8e None None] Updating 
resource provider c17e7f32-68d8-4e2f-8979-15d701de02f7 generation from 1 to 2 during operation: update_traits {{(pid=62627) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 662.899079] env[62627]: DEBUG nova.compute.resource_tracker [None req-d38c36d3-d39f-410d-9c9c-cd59d1571f8e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62627) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 662.899079] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d38c36d3-d39f-410d-9c9c-cd59d1571f8e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.837s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 662.899382] env[62627]: DEBUG nova.service [None req-d38c36d3-d39f-410d-9c9c-cd59d1571f8e None None] Creating RPC server for service compute {{(pid=62627) start /opt/stack/nova/nova/service.py:186}} [ 662.912484] env[62627]: DEBUG nova.service [None req-d38c36d3-d39f-410d-9c9c-cd59d1571f8e None None] Join ServiceGroup membership for this service compute {{(pid=62627) start /opt/stack/nova/nova/service.py:203}} [ 662.912658] env[62627]: DEBUG nova.servicegroup.drivers.db [None req-d38c36d3-d39f-410d-9c9c-cd59d1571f8e None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=62627) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 673.916073] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._sync_power_states {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 674.418751] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Getting list of instances from cluster (obj){ [ 674.418751] env[62627]: value = "domain-c8" [ 674.418751] env[62627]: _type = "ClusterComputeResource" [ 674.418751] env[62627]: } {{(pid=62627) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 674.419935] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-199f8ce6-648d-4943-b6c7-efa47f04a56b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.428824] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Got total of 0 instances {{(pid=62627) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 674.429051] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 674.429349] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Getting list of instances from cluster (obj){ [ 674.429349] env[62627]: value = "domain-c8" [ 674.429349] env[62627]: _type = "ClusterComputeResource" [ 674.429349] env[62627]: } {{(pid=62627) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 674.430213] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feec485a-15d8-4db1-8d79-ab49252fa9eb 
{{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.437356] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Got total of 0 instances {{(pid=62627) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 707.582977] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Acquiring lock "5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 707.583277] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Lock "5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 708.087997] env[62627]: DEBUG nova.compute.manager [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] [instance: 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 708.495898] env[62627]: DEBUG oslo_concurrency.lockutils [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Acquiring lock "77418dff-6bdc-4e0a-954a-45c80960dbf1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 708.496318] env[62627]: DEBUG oslo_concurrency.lockutils [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Lock "77418dff-6bdc-4e0a-954a-45c80960dbf1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 708.629499] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 708.629772] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 708.631956] env[62627]: INFO nova.compute.claims [None 
req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] [instance: 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 708.960562] env[62627]: DEBUG oslo_concurrency.lockutils [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Acquiring lock "f15982c3-23ed-4f25-bf55-3418cac117cc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 708.960806] env[62627]: DEBUG oslo_concurrency.lockutils [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Lock "f15982c3-23ed-4f25-bf55-3418cac117cc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 709.002026] env[62627]: DEBUG nova.compute.manager [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: 77418dff-6bdc-4e0a-954a-45c80960dbf1] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 709.464599] env[62627]: DEBUG nova.compute.manager [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] [instance: f15982c3-23ed-4f25-bf55-3418cac117cc] Starting instance... 
{{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 709.472677] env[62627]: DEBUG oslo_concurrency.lockutils [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Acquiring lock "a674db15-ea4e-44c8-abc1-6af425286517" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 709.475365] env[62627]: DEBUG oslo_concurrency.lockutils [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Lock "a674db15-ea4e-44c8-abc1-6af425286517" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.003s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 709.538080] env[62627]: DEBUG oslo_concurrency.lockutils [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 709.731685] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db95dbd7-5e64-4981-85c4-b16e1d3e6523 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.745323] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81e3b351-f638-404d-ac73-926e705264b2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.781920] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8489342f-2a82-47fd-a740-6823edb597d1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.789699] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eed07fc7-691c-4583-b121-9881ad3e65df {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.805755] env[62627]: DEBUG nova.compute.provider_tree [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 709.982814] env[62627]: DEBUG nova.compute.manager [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Starting instance... 
{{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 710.012305] env[62627]: DEBUG oslo_concurrency.lockutils [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 710.310562] env[62627]: DEBUG nova.scheduler.client.report [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 710.515543] env[62627]: DEBUG oslo_concurrency.lockutils [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 710.602705] env[62627]: DEBUG oslo_concurrency.lockutils [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Acquiring lock "ab999825-4310-4fd8-81f0-6bb5dde4175b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 710.602902] env[62627]: DEBUG oslo_concurrency.lockutils [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Lock "ab999825-4310-4fd8-81f0-6bb5dde4175b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 710.821158] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.191s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 710.821697] env[62627]: DEBUG nova.compute.manager [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] [instance: 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0] Start building networks asynchronously for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 710.830768] env[62627]: DEBUG oslo_concurrency.lockutils [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.293s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 710.834487] env[62627]: INFO nova.compute.claims [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: 77418dff-6bdc-4e0a-954a-45c80960dbf1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 710.982165] env[62627]: DEBUG oslo_concurrency.lockutils [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Acquiring lock "1298559d-14fb-422c-8206-a35a747f0c06" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 710.982165] env[62627]: DEBUG oslo_concurrency.lockutils [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Lock "1298559d-14fb-422c-8206-a35a747f0c06" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 711.105072] env[62627]: DEBUG nova.compute.manager [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Starting instance... 
{{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 711.305017] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 711.305223] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 711.305364] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Starting heal instance info cache {{(pid=62627) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 711.305492] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Rebuilding the list of instances to heal {{(pid=62627) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10449}} [ 711.337445] env[62627]: DEBUG nova.compute.utils [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 711.340642] env[62627]: DEBUG nova.compute.manager [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] [instance: 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0] Allocating IP information in the background. {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 711.340642] env[62627]: DEBUG nova.network.neutron [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] [instance: 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 711.488092] env[62627]: DEBUG nova.compute.manager [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] [instance: 1298559d-14fb-422c-8206-a35a747f0c06] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 711.641491] env[62627]: DEBUG oslo_concurrency.lockutils [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 711.815707] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0] Skipping network cache update for instance because it is Building. {{(pid=62627) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10458}} [ 711.815707] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 77418dff-6bdc-4e0a-954a-45c80960dbf1] Skipping network cache update for instance because it is Building. 
{{(pid=62627) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10458}} [ 711.815707] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Didn't find any instances for network info cache update. {{(pid=62627) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10531}} [ 711.815707] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 711.815707] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 711.815707] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 711.816261] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 711.816669] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 711.817308] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 711.818256] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62627) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 711.818256] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 711.844487] env[62627]: DEBUG nova.compute.manager [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] [instance: 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0] Start building block device mappings for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 711.901760] env[62627]: DEBUG nova.policy [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ccb8d9c080cb4714a3de5547a2dbd753', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0f0d1cf389f041c690185c2b2399855e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 712.013607] env[62627]: DEBUG oslo_concurrency.lockutils [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 712.032427] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a84f14db-f7f1-4336-a21f-065d9abae032 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.041775] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eca26143-1822-4a6e-be61-a81eb0f8d5d8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.079942] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91a8112b-eec9-4593-b891-5326006d696f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.088604] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9accbc57-a4d1-4feb-857d-df15f98978ea {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.103914] env[62627]: DEBUG nova.compute.provider_tree [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 712.322544] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 712.428158] env[62627]: DEBUG nova.network.neutron [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] [instance: 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0] Successfully created port: a1a51631-0dc2-427d-a041-f4c9f944ea3d {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 712.608998] env[62627]: DEBUG nova.scheduler.client.report [None 
req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 712.856126] env[62627]: DEBUG nova.compute.manager [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] [instance: 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0] Start spawning the instance on the hypervisor. {{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 712.897854] env[62627]: DEBUG nova.virt.hardware [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 712.898117] env[62627]: DEBUG nova.virt.hardware [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 712.898277] env[62627]: DEBUG nova.virt.hardware [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 712.898463] env[62627]: DEBUG nova.virt.hardware [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 712.898611] env[62627]: DEBUG nova.virt.hardware [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 712.898760] env[62627]: DEBUG nova.virt.hardware [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea 
tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 712.898966] env[62627]: DEBUG nova.virt.hardware [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 712.899886] env[62627]: DEBUG nova.virt.hardware [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 712.900613] env[62627]: DEBUG nova.virt.hardware [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 712.900613] env[62627]: DEBUG nova.virt.hardware [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 712.901731] env[62627]: DEBUG nova.virt.hardware [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 712.901731] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1de1a28f-ea5a-4f87-8db3-230b210bc17b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.913234] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaf0d95c-b60b-4fdd-9808-393ebb3c1efc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.933937] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60057e24-fef5-48a5-9d29-64ffc14bff4f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.119393] env[62627]: DEBUG oslo_concurrency.lockutils [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.289s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 713.120083] env[62627]: DEBUG nova.compute.manager [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] 
[instance: 77418dff-6bdc-4e0a-954a-45c80960dbf1] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 713.122622] env[62627]: DEBUG oslo_concurrency.lockutils [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.110s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 713.126415] env[62627]: INFO nova.compute.claims [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] [instance: f15982c3-23ed-4f25-bf55-3418cac117cc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 713.629867] env[62627]: DEBUG nova.compute.utils [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 713.638097] env[62627]: DEBUG nova.compute.manager [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: 77418dff-6bdc-4e0a-954a-45c80960dbf1] Allocating IP information in the background. {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 713.638097] env[62627]: DEBUG nova.network.neutron [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: 77418dff-6bdc-4e0a-954a-45c80960dbf1] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 713.791705] env[62627]: DEBUG nova.policy [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f3c4f7676644413395ef9f0026d96f67', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '559dec0891b747438b15609a2f41ac0f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 714.143026] env[62627]: DEBUG nova.compute.manager [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: 77418dff-6bdc-4e0a-954a-45c80960dbf1] Start building block device mappings for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 714.152055] env[62627]: DEBUG oslo_concurrency.lockutils [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Acquiring lock "7366e4b0-bdba-435d-844d-8e81c4464b31" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 714.152296] env[62627]: DEBUG oslo_concurrency.lockutils [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Lock "7366e4b0-bdba-435d-844d-8e81c4464b31" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 714.293440] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63c4b95c-2bf6-4559-831b-896a754002bf {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.304069] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11e61ad4-4497-4e97-8c7c-6ad1799a0dae {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.339429] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-915cd940-de9c-40ce-b7bf-d9be67082c0e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.346825] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c117cde-4d35-4fc3-98ef-8ed35e18571e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.363118] env[62627]: DEBUG nova.compute.provider_tree [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 714.644735] env[62627]: DEBUG nova.network.neutron [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: 77418dff-6bdc-4e0a-954a-45c80960dbf1] Successfully created port: 79ffbb02-e48b-42f6-a1c3-8f9a38f02aa7 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 714.657538] env[62627]: DEBUG nova.compute.manager [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Starting instance... 
{{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 714.872048] env[62627]: DEBUG nova.scheduler.client.report [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 715.156310] env[62627]: DEBUG nova.compute.manager [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: 77418dff-6bdc-4e0a-954a-45c80960dbf1] Start spawning the instance on the hypervisor. {{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 715.186687] env[62627]: DEBUG nova.virt.hardware [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 715.186969] env[62627]: DEBUG nova.virt.hardware [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 715.187145] env[62627]: DEBUG nova.virt.hardware [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 715.187317] env[62627]: DEBUG nova.virt.hardware [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 715.187460] env[62627]: DEBUG nova.virt.hardware [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 715.187605] env[62627]: DEBUG nova.virt.hardware [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 715.190836] env[62627]: DEBUG nova.virt.hardware [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 715.193205] env[62627]: DEBUG nova.virt.hardware [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 715.193205] env[62627]: DEBUG nova.virt.hardware [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 715.193205] env[62627]: DEBUG nova.virt.hardware [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 715.193205] env[62627]: DEBUG nova.virt.hardware [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 715.193696] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7243fb34-0c4c-49d0-abbe-004cdfd35425 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.199343] env[62627]: DEBUG oslo_concurrency.lockutils [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 715.205735] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb4de322-3047-4774-9c28-944c45b1fe89 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.380510] env[62627]: DEBUG oslo_concurrency.lockutils [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.258s {{(pid=62627) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 715.384343] env[62627]: DEBUG nova.compute.manager [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] [instance: f15982c3-23ed-4f25-bf55-3418cac117cc] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 715.387884] env[62627]: DEBUG oslo_concurrency.lockutils [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.873s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 715.388832] env[62627]: INFO nova.compute.claims [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 715.689148] env[62627]: DEBUG nova.network.neutron [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] [instance: 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0] Successfully updated port: a1a51631-0dc2-427d-a041-f4c9f944ea3d {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 715.897442] env[62627]: DEBUG nova.compute.utils [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 715.898828] env[62627]: DEBUG nova.compute.manager [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] [instance: f15982c3-23ed-4f25-bf55-3418cac117cc] Allocating IP information in the background. 
{{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 715.899017] env[62627]: DEBUG nova.network.neutron [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] [instance: f15982c3-23ed-4f25-bf55-3418cac117cc] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 716.158883] env[62627]: DEBUG nova.policy [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9337664e56cb46a98a34b0a4515c4236', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8f2de9bc7c3a481ba6c90634dc99d148', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 716.191420] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Acquiring lock "refresh_cache-5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 716.191581] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Acquired lock "refresh_cache-5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 716.191875] env[62627]: DEBUG nova.network.neutron [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] [instance: 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 716.404238] env[62627]: DEBUG nova.compute.manager [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] [instance: f15982c3-23ed-4f25-bf55-3418cac117cc] Start building block device mappings for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 716.549028] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a09ec6ed-1b7f-4149-bc6b-3bdfd14eb0ef {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.556604] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74da2747-ab87-482c-9c3c-c0adeff5b296 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.587358] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da9be8db-c19f-46e5-9ad3-6df0930a5da2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.595088] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-447505b4-436e-4dc3-a72e-e9fe91845ed2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.608746] env[62627]: DEBUG nova.compute.provider_tree [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 716.807595] env[62627]: DEBUG nova.network.neutron [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] [instance: 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0] Instance cache missing network info. 
{{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 716.990019] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Acquiring lock "c1fd6afb-9332-446d-a441-5942b60edc94" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 716.990019] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Lock "c1fd6afb-9332-446d-a441-5942b60edc94" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 717.115601] env[62627]: DEBUG nova.scheduler.client.report [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 717.417840] env[62627]: DEBUG nova.compute.manager [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] [instance: f15982c3-23ed-4f25-bf55-3418cac117cc] Start spawning the instance on the hypervisor. 
{{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 717.458539] env[62627]: DEBUG nova.virt.hardware [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 717.462782] env[62627]: DEBUG nova.virt.hardware [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 717.462782] env[62627]: DEBUG nova.virt.hardware [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 717.462782] env[62627]: DEBUG nova.virt.hardware [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 717.462782] env[62627]: DEBUG nova.virt.hardware [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 717.462782] env[62627]: DEBUG nova.virt.hardware [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 717.462987] env[62627]: DEBUG nova.virt.hardware [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 717.462987] env[62627]: DEBUG nova.virt.hardware [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 717.462987] env[62627]: DEBUG nova.virt.hardware [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 717.462987] env[62627]: DEBUG nova.virt.hardware [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 717.462987] env[62627]: DEBUG nova.virt.hardware [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 717.463189] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e1d4d6b-a63f-4ff1-9a98-2251b7cc7028 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.466399] env[62627]: DEBUG nova.network.neutron [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] [instance: 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0] Updating instance_info_cache with network_info: [{"id": "a1a51631-0dc2-427d-a041-f4c9f944ea3d", "address": "fa:16:3e:b2:1d:31", "network": {"id": "bd876f46-2bdf-4196-b718-c36f7e15f150", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.162", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ccb0cafa1a904d578ec4998b386bebf7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae1d6a8-cbba-4bbf-af10-ba5467340475", "external-id": "nsx-vlan-transportzone-271", "segmentation_id": 271, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa1a51631-0d", "ovs_interfaceid": "a1a51631-0dc2-427d-a041-f4c9f944ea3d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 717.474074] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa3d51f8-bb74-4033-ab2c-03f5c9e54b2a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.491161] env[62627]: DEBUG nova.compute.manager [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: c1fd6afb-9332-446d-a441-5942b60edc94] Starting instance... 
{{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 717.590985] env[62627]: DEBUG nova.network.neutron [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: 77418dff-6bdc-4e0a-954a-45c80960dbf1] Successfully updated port: 79ffbb02-e48b-42f6-a1c3-8f9a38f02aa7 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 717.620608] env[62627]: DEBUG oslo_concurrency.lockutils [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.233s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 717.621220] env[62627]: DEBUG nova.compute.manager [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 717.623944] env[62627]: DEBUG oslo_concurrency.lockutils [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.983s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 717.625399] env[62627]: INFO nova.compute.claims [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 717.702223] env[62627]: DEBUG nova.network.neutron [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] [instance: f15982c3-23ed-4f25-bf55-3418cac117cc] Successfully created port: db80642f-6b36-430d-badf-9ba7dacbbad6 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 717.970700] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Releasing lock "refresh_cache-5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 717.970700] env[62627]: DEBUG nova.compute.manager [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] [instance: 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0] Instance network_info: |[{"id": "a1a51631-0dc2-427d-a041-f4c9f944ea3d", "address": "fa:16:3e:b2:1d:31", "network": {"id": "bd876f46-2bdf-4196-b718-c36f7e15f150", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.162", "type": "fixed", "version": 4, "meta": {}, "floating_ips": 
[]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ccb0cafa1a904d578ec4998b386bebf7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae1d6a8-cbba-4bbf-af10-ba5467340475", "external-id": "nsx-vlan-transportzone-271", "segmentation_id": 271, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa1a51631-0d", "ovs_interfaceid": "a1a51631-0dc2-427d-a041-f4c9f944ea3d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 717.971353] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] [instance: 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b2:1d:31', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cae1d6a8-cbba-4bbf-af10-ba5467340475', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a1a51631-0dc2-427d-a041-f4c9f944ea3d', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 717.992042] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 717.992042] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0afeedf5-8fd1-43bb-8617-d1a6a93e6edc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.010194] env[62627]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 718.010194] env[62627]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=62627) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 718.010803] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Folder already exists: OpenStack. Parent ref: group-v4. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 718.011148] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Creating folder: Project (0f0d1cf389f041c690185c2b2399855e). Parent ref: group-v447541. 
{{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 718.011476] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-628651cf-9a83-4c72-b038-46203f0993f6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.026720] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Created folder: Project (0f0d1cf389f041c690185c2b2399855e) in parent group-v447541. [ 718.027049] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Creating folder: Instances. Parent ref: group-v447545. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 718.028732] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 718.028732] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-47a0ba8e-7825-45f6-968a-3157feb8b400 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.040567] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Created folder: Instances in parent group-v447545. [ 718.040860] env[62627]: DEBUG oslo.service.loopingcall [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 718.041066] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 718.041673] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a5f1f8e1-9d34-4a7f-a2c4-234d73ce1754 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.068379] env[62627]: DEBUG oslo_concurrency.lockutils [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Acquiring lock "77c60059-535a-47ca-a5b0-45cc1df93b45" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 718.068624] env[62627]: DEBUG oslo_concurrency.lockutils [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Lock "77c60059-535a-47ca-a5b0-45cc1df93b45" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.003s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 718.073038] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 718.073038] env[62627]: value = "task-2193899" [ 718.073038] env[62627]: _type = "Task" [ 718.073038] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.086258] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2193899, 'name': CreateVM_Task} progress is 5%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.099704] env[62627]: DEBUG oslo_concurrency.lockutils [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Acquiring lock "refresh_cache-77418dff-6bdc-4e0a-954a-45c80960dbf1" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 718.099704] env[62627]: DEBUG oslo_concurrency.lockutils [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Acquired lock "refresh_cache-77418dff-6bdc-4e0a-954a-45c80960dbf1" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 718.099853] env[62627]: DEBUG nova.network.neutron [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: 77418dff-6bdc-4e0a-954a-45c80960dbf1] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 718.130070] env[62627]: DEBUG nova.compute.utils [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 718.133650] env[62627]: DEBUG nova.compute.manager [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Allocating IP information in the background. 
{{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 718.133832] env[62627]: DEBUG nova.network.neutron [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: a674db15-ea4e-44c8-abc1-6af425286517] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 718.255619] env[62627]: DEBUG nova.policy [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '137df2297d4e4317ba82a3c6faf09bc4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b12d6c9026644c9cb45d687b92161121', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 718.417741] env[62627]: DEBUG nova.compute.manager [req-f98ee097-9ce6-42c0-909e-7613c525e5f3 req-05507f71-f506-460f-9778-a1281426f3d5 service nova] [instance: 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0] Received event network-vif-plugged-a1a51631-0dc2-427d-a041-f4c9f944ea3d {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 718.417951] env[62627]: DEBUG oslo_concurrency.lockutils [req-f98ee097-9ce6-42c0-909e-7613c525e5f3 req-05507f71-f506-460f-9778-a1281426f3d5 service nova] Acquiring lock "5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 718.418206] env[62627]: DEBUG oslo_concurrency.lockutils [req-f98ee097-9ce6-42c0-909e-7613c525e5f3 req-05507f71-f506-460f-9778-a1281426f3d5 service nova] Lock "5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 718.418455] env[62627]: DEBUG oslo_concurrency.lockutils [req-f98ee097-9ce6-42c0-909e-7613c525e5f3 req-05507f71-f506-460f-9778-a1281426f3d5 service nova] Lock "5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 718.420739] env[62627]: DEBUG nova.compute.manager [req-f98ee097-9ce6-42c0-909e-7613c525e5f3 req-05507f71-f506-460f-9778-a1281426f3d5 service nova] [instance: 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0] No waiting events found dispatching network-vif-plugged-a1a51631-0dc2-427d-a041-f4c9f944ea3d {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 718.420982] env[62627]: WARNING nova.compute.manager [req-f98ee097-9ce6-42c0-909e-7613c525e5f3 req-05507f71-f506-460f-9778-a1281426f3d5 service nova] [instance: 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0] Received unexpected event network-vif-plugged-a1a51631-0dc2-427d-a041-f4c9f944ea3d for instance with vm_state building and task_state spawning. 
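
Note: the repeated "Acquiring lock ... / Lock ... acquired ... waited N s / Lock ... released ... held N s" triplets above (for "compute_resources", the per-instance build lock, and the "<uuid>-events" lock) are emitted by oslo.concurrency's lockutils. A minimal, standalone sketch of that pattern follows; the lock names and guarded functions are illustrative only, not Nova's actual code:

    from oslo_concurrency import lockutils

    # Decorator form: every caller sharing the same lock name is serialized,
    # which is what produces the "compute_resources" acquire/release lines.
    @lockutils.synchronized('compute_resources')
    def claim_resources(instance_uuid):
        # resource-tracker style bookkeeping would happen here
        return instance_uuid

    # Context-manager form, as used for short critical sections such as the
    # per-instance event lock; fair=True hands the lock to waiters in FIFO order.
    def pop_instance_event(instance_uuid):
        with lockutils.lock('%s-events' % instance_uuid, fair=True):
            # look up and remove the pending external event here
            pass
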
[ 718.577030] env[62627]: DEBUG nova.compute.manager [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: 77c60059-535a-47ca-a5b0-45cc1df93b45] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 718.593170] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2193899, 'name': CreateVM_Task, 'duration_secs': 0.458311} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.593170] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 718.620958] env[62627]: DEBUG oslo_vmware.service [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7aee6d1-637c-4362-bdae-b2dd0d3a648e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.628101] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 718.628504] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 718.632330] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 718.632330] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ffe8922-97ec-4b39-8afc-4818f34a541f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.633922] env[62627]: DEBUG nova.compute.manager [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Start building block device mappings for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 718.641941] env[62627]: DEBUG oslo_vmware.api [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Waiting for the task: (returnval){ [ 718.641941] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5290d052-1f23-9b65-2c81-8d0e6b468486" [ 718.641941] env[62627]: _type = "Task" [ 718.641941] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.665022] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 718.665022] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] [instance: 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 718.665022] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 718.665022] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 718.666294] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 718.666294] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2708f1c2-ecf3-4539-a4c9-34088b596f67 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.683129] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 718.685523] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Folder [datastore2] 
devstack-image-cache_base created. {{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 718.687650] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6438d681-8084-4158-92ed-71ab38223df1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.701237] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f82003e2-1034-4fe4-a245-cee484d9b4b0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.704552] env[62627]: DEBUG nova.network.neutron [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: 77418dff-6bdc-4e0a-954a-45c80960dbf1] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 718.713192] env[62627]: DEBUG oslo_vmware.api [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Waiting for the task: (returnval){ [ 718.713192] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52bd43ae-a87e-f820-ed5f-dec6edcef0ff" [ 718.713192] env[62627]: _type = "Task" [ 718.713192] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.731198] env[62627]: DEBUG oslo_vmware.api [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52bd43ae-a87e-f820-ed5f-dec6edcef0ff, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.864400] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2bc5cbd-1dcb-4545-b2c9-11fdeb855eea {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.884650] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d5687e8-e9cf-4510-81e4-3b0933e9885f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.920628] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a76efa9-71a8-4aef-8322-96c278706dcf {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.929368] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97b58e01-b359-4c6d-9626-1222cf39872f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.945553] env[62627]: DEBUG nova.compute.provider_tree [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 719.043165] env[62627]: DEBUG nova.network.neutron [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: 77418dff-6bdc-4e0a-954a-45c80960dbf1] Updating instance_info_cache with network_info: [{"id": "79ffbb02-e48b-42f6-a1c3-8f9a38f02aa7", "address": "fa:16:3e:c1:30:1f", "network": {"id": "bd876f46-2bdf-4196-b718-c36f7e15f150", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.172", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ccb0cafa1a904d578ec4998b386bebf7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae1d6a8-cbba-4bbf-af10-ba5467340475", "external-id": "nsx-vlan-transportzone-271", "segmentation_id": 271, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap79ffbb02-e4", "ovs_interfaceid": "79ffbb02-e48b-42f6-a1c3-8f9a38f02aa7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 719.117280] env[62627]: DEBUG oslo_concurrency.lockutils [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 719.229337] env[62627]: DEBUG 
nova.virt.vmwareapi.vmops [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] [instance: 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0] Preparing fetch location {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 719.229512] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Creating directory with path [datastore2] vmware_temp/78c683e2-4ef6-4ef7-801b-2110e198601b/d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 719.231449] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ef02bb34-9faf-40e2-b1d8-2bb020738dba {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.234555] env[62627]: DEBUG nova.network.neutron [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Successfully created port: 246fd9bd-81de-4162-b744-58c17169a80f {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 719.255561] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Created directory with path [datastore2] vmware_temp/78c683e2-4ef6-4ef7-801b-2110e198601b/d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 719.255561] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] [instance: 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0] Fetch image to [datastore2] vmware_temp/78c683e2-4ef6-4ef7-801b-2110e198601b/d5497cf1-16e8-478a-a834-e9289b1751b7/tmp-sparse.vmdk {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 719.255561] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] [instance: 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0] Downloading image file data d5497cf1-16e8-478a-a834-e9289b1751b7 to [datastore2] vmware_temp/78c683e2-4ef6-4ef7-801b-2110e198601b/d5497cf1-16e8-478a-a834-e9289b1751b7/tmp-sparse.vmdk on the data store datastore2 {{(pid=62627) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 719.255561] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a04b8866-e8b8-42eb-b674-f01985cce7d6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.264777] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed01d73d-882a-4a06-94c8-3edd5fb165cb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.280596] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-73543eee-0f60-43e7-9e09-628319a6e6b8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.314105] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c75380b8-6b7b-431b-9f10-b2c989b4996d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.323651] env[62627]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-dc65ec0e-6939-4a8b-a6e0-c14d93239d68 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.419343] env[62627]: DEBUG nova.virt.vmwareapi.images [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] [instance: 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0] Downloading image file data d5497cf1-16e8-478a-a834-e9289b1751b7 to the data store datastore2 {{(pid=62627) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 719.453022] env[62627]: DEBUG nova.scheduler.client.report [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 719.497284] env[62627]: DEBUG oslo_vmware.rw_handles [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/78c683e2-4ef6-4ef7-801b-2110e198601b/d5497cf1-16e8-478a-a834-e9289b1751b7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
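The oslo_vmware.rw_handles message above shows the driver opening an HTTP write connection to push the 21318656-byte image file directly into the datastore folder URL, after first invoking SessionManager.AcquireGenericServiceTicket for authentication. A minimal stand-in for that upload, assuming a plain HTTP PUT endpoint and the third-party requests library (this is not what oslo.vmware's FileWriteHandle does internally), could look like:

import requests  # assumption: requests is available; oslo.vmware uses its own handle classes

def upload_image(url, chunk_iter, cookies=None, verify=True, timeout=300):
    """Stream an iterator of byte chunks to a datastore folder URL (illustrative only)."""
    headers = {"Content-Type": "application/octet-stream"}
    # Passing a generator as `data` makes requests use chunked transfer encoding,
    # so the whole ~21 MB image never has to sit in memory at once.
    resp = requests.put(url, data=chunk_iter, headers=headers,
                        cookies=cookies, verify=verify, timeout=timeout)
    resp.raise_for_status()
    return resp.status_code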
{{(pid=62627) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 719.575856] env[62627]: DEBUG oslo_concurrency.lockutils [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Releasing lock "refresh_cache-77418dff-6bdc-4e0a-954a-45c80960dbf1" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 719.581419] env[62627]: DEBUG nova.compute.manager [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: 77418dff-6bdc-4e0a-954a-45c80960dbf1] Instance network_info: |[{"id": "79ffbb02-e48b-42f6-a1c3-8f9a38f02aa7", "address": "fa:16:3e:c1:30:1f", "network": {"id": "bd876f46-2bdf-4196-b718-c36f7e15f150", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.172", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ccb0cafa1a904d578ec4998b386bebf7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae1d6a8-cbba-4bbf-af10-ba5467340475", "external-id": "nsx-vlan-transportzone-271", "segmentation_id": 271, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap79ffbb02-e4", "ovs_interfaceid": "79ffbb02-e48b-42f6-a1c3-8f9a38f02aa7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 719.581560] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: 77418dff-6bdc-4e0a-954a-45c80960dbf1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c1:30:1f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cae1d6a8-cbba-4bbf-af10-ba5467340475', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '79ffbb02-e48b-42f6-a1c3-8f9a38f02aa7', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 719.588876] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Creating folder: Project (559dec0891b747438b15609a2f41ac0f). Parent ref: group-v447541. 
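The instance_info_cache update above embeds the complete Neutron network_info structure for port 79ffbb02-e48b-42f6-a1c3-8f9a38f02aa7. Nova wraps this in nova.network.model objects, but as a plain data structure it can be summarized with a few dictionary lookups; the helper below is an illustrative sketch that assumes only the list-of-VIF-dicts shape printed in the log.

def summarize_network_info(network_info):
    """Return (port id, ovs_interfaceid, fixed IPs) for each VIF in a network_info list."""
    summary = []
    for vif in network_info:
        fixed_ips = [ip["address"]
                     for subnet in vif["network"]["subnets"]
                     for ip in subnet["ips"]]
        summary.append((vif["id"], vif.get("ovs_interfaceid"), fixed_ips))
    return summary

# Applied to the cache entry logged above this yields
# [('79ffbb02-e48b-42f6-a1c3-8f9a38f02aa7', '79ffbb02-e48b-42f6-a1c3-8f9a38f02aa7', ['192.168.233.172'])]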
{{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 719.590486] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d340e16f-54d5-49b8-9807-0815f453f761 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.617031] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Created folder: Project (559dec0891b747438b15609a2f41ac0f) in parent group-v447541. [ 719.617324] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Creating folder: Instances. Parent ref: group-v447548. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 719.623367] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e5433665-abf7-45fb-8d4a-48b527d77c00 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.635803] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Created folder: Instances in parent group-v447548. [ 719.636083] env[62627]: DEBUG oslo.service.loopingcall [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 719.636709] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 77418dff-6bdc-4e0a-954a-45c80960dbf1] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 719.637098] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d8872f42-5a00-4f14-becd-af2f6843bbb7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.658501] env[62627]: DEBUG nova.compute.manager [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Start spawning the instance on the hypervisor. {{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 719.676472] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 719.676472] env[62627]: value = "task-2193902" [ 719.676472] env[62627]: _type = "Task" [ 719.676472] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.693882] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2193902, 'name': CreateVM_Task} progress is 0%. 
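The "Waiting for the task" / "progress is 0%" pairs throughout this log come from oslo.vmware's wait_for_task polling a vCenter Task managed object until it finishes. A condensed stand-in of that loop, assuming only a poll() callable that returns a (state, progress) tuple, is sketched below; the real implementation runs inside an oslo.service looping call and re-raises the task's fault on error.

import time

def wait_for_task(poll, interval=0.5, timeout=300.0):
    """Poll until the task succeeds, fails, or the timeout expires (simplified sketch)."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state, progress = poll()   # e.g. ('running', 0) ... ('success', 100)
        if state == "success":
            return True
        if state == "error":
            raise RuntimeError("task reported an error")
        time.sleep(interval)
    raise TimeoutError("task did not complete within %.0fs" % timeout)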
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.696842] env[62627]: DEBUG nova.virt.hardware [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 719.697112] env[62627]: DEBUG nova.virt.hardware [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 719.697350] env[62627]: DEBUG nova.virt.hardware [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 719.697489] env[62627]: DEBUG nova.virt.hardware [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 719.697639] env[62627]: DEBUG nova.virt.hardware [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 719.697839] env[62627]: DEBUG nova.virt.hardware [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 719.698181] env[62627]: DEBUG nova.virt.hardware [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 719.698421] env[62627]: DEBUG nova.virt.hardware [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 719.698597] env[62627]: DEBUG nova.virt.hardware [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 719.698784] env[62627]: DEBUG nova.virt.hardware [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 719.698933] env[62627]: DEBUG nova.virt.hardware [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 719.701495] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dcc10c7-10d5-4b2a-9544-fa6ae6f5e601 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.713038] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d5dd502-2859-44d1-bbf6-6daca0bb54ee {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.956843] env[62627]: DEBUG oslo_concurrency.lockutils [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.332s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 719.956843] env[62627]: DEBUG nova.compute.manager [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 719.960195] env[62627]: DEBUG oslo_concurrency.lockutils [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.947s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 719.961341] env[62627]: INFO nova.compute.claims [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] [instance: 1298559d-14fb-422c-8206-a35a747f0c06] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 720.193786] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2193902, 'name': CreateVM_Task, 'duration_secs': 0.429762} completed successfully. 
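The nova.virt.hardware lines above trace _get_desirable_cpu_topologies for the one-vCPU m1.nano flavor: with no flavor or image limits the maxima default to 65536 sockets/cores/threads, exactly one topology is possible, and it is also the sorted preference. A rough illustration of that enumeration step, ignoring the preference sorting Nova applies afterwards, follows.

import itertools

def possible_cpu_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """List (sockets, cores, threads) triples whose product equals the vCPU count."""
    def limit(m):
        return range(1, min(vcpus, m) + 1)
    return [(s, c, t)
            for s, c, t in itertools.product(limit(max_sockets), limit(max_cores), limit(max_threads))
            if s * c * t == vcpus]

# possible_cpu_topologies(1) == [(1, 1, 1)], matching the single
# VirtCPUTopology(cores=1,sockets=1,threads=1) reported in the log.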
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.193960] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 77418dff-6bdc-4e0a-954a-45c80960dbf1] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 720.194649] env[62627]: DEBUG oslo_concurrency.lockutils [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 720.194808] env[62627]: DEBUG oslo_concurrency.lockutils [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 720.195138] env[62627]: DEBUG oslo_concurrency.lockutils [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 720.195382] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7dcc4d58-cf20-4ce7-a89f-27a4bced4499 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.201451] env[62627]: DEBUG oslo_vmware.api [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Waiting for the task: (returnval){ [ 720.201451] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52f8e553-d59b-6617-d3d1-be116c816980" [ 720.201451] env[62627]: _type = "Task" [ 720.201451] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.211373] env[62627]: DEBUG oslo_vmware.api [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52f8e553-d59b-6617-d3d1-be116c816980, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.222201] env[62627]: DEBUG oslo_vmware.rw_handles [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Completed reading data from the image iterator. 
{{(pid=62627) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 720.222998] env[62627]: DEBUG oslo_vmware.rw_handles [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/78c683e2-4ef6-4ef7-801b-2110e198601b/d5497cf1-16e8-478a-a834-e9289b1751b7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62627) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 720.375393] env[62627]: DEBUG nova.virt.vmwareapi.images [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] [instance: 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0] Downloaded image file data d5497cf1-16e8-478a-a834-e9289b1751b7 to vmware_temp/78c683e2-4ef6-4ef7-801b-2110e198601b/d5497cf1-16e8-478a-a834-e9289b1751b7/tmp-sparse.vmdk on the data store datastore2 {{(pid=62627) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 720.376067] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] [instance: 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0] Caching image {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 720.376341] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Copying Virtual Disk [datastore2] vmware_temp/78c683e2-4ef6-4ef7-801b-2110e198601b/d5497cf1-16e8-478a-a834-e9289b1751b7/tmp-sparse.vmdk to [datastore2] vmware_temp/78c683e2-4ef6-4ef7-801b-2110e198601b/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 720.377059] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6e76b70d-13db-469f-854f-4771ee5ff216 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.387471] env[62627]: DEBUG oslo_vmware.api [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Waiting for the task: (returnval){ [ 720.387471] env[62627]: value = "task-2193904" [ 720.387471] env[62627]: _type = "Task" [ 720.387471] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.395811] env[62627]: DEBUG oslo_vmware.api [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Task: {'id': task-2193904, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.468103] env[62627]: DEBUG nova.compute.utils [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 720.470509] env[62627]: DEBUG nova.compute.manager [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Allocating IP information in the background. {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 720.470938] env[62627]: DEBUG nova.network.neutron [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 720.641189] env[62627]: DEBUG nova.policy [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0896d768b334478bb19804bbb156f4bb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e4db2144e94840c4a422d2d933b4a65f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 720.717028] env[62627]: DEBUG oslo_concurrency.lockutils [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 720.717081] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: 77418dff-6bdc-4e0a-954a-45c80960dbf1] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 720.717469] env[62627]: DEBUG oslo_concurrency.lockutils [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 720.899379] env[62627]: DEBUG oslo_vmware.api [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Task: {'id': task-2193904, 'name': CopyVirtualDisk_Task} progress is 100%. 
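The nova.policy line above records an oslo.policy check: the network:attach_external_network rule is evaluated against the request credentials (roles reader/member, is_admin False) and fails, so the instance's port is not bound to an external network. A toy evaluator for two common rule forms is sketched below; it is purely illustrative and stands in for oslo.policy's Enforcer, whose rule language is far richer.

def check_rule(rule, credentials):
    """Evaluate a 'role:<name>' or 'is_admin:True' style rule against a credentials dict."""
    if rule.startswith("role:"):
        return rule.split(":", 1)[1] in credentials.get("roles", [])
    if rule.startswith("is_admin:"):
        return credentials.get("is_admin", False) == (rule.split(":", 1)[1] == "True")
    return False

creds = {"is_admin": False, "roles": ["reader", "member"]}
assert check_rule("is_admin:True", creds) is False   # mirrors the failed check logged above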
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.973078] env[62627]: DEBUG nova.compute.manager [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 721.243629] env[62627]: DEBUG nova.network.neutron [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] [instance: f15982c3-23ed-4f25-bf55-3418cac117cc] Successfully updated port: db80642f-6b36-430d-badf-9ba7dacbbad6 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 721.280868] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-201c3cb7-68ab-4c09-b532-b2c80e6bae26 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.292510] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60405315-652e-4907-9e81-041d42a5f34d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.327657] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb40536c-5e2b-4d22-b9d7-22bb51bb20c8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.338719] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47373339-619f-42fa-8e13-f2b5000d8cfd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.360262] env[62627]: DEBUG nova.compute.provider_tree [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 721.401609] env[62627]: DEBUG oslo_vmware.api [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Task: {'id': task-2193904, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.718199} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.404204] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Copied Virtual Disk [datastore2] vmware_temp/78c683e2-4ef6-4ef7-801b-2110e198601b/d5497cf1-16e8-478a-a834-e9289b1751b7/tmp-sparse.vmdk to [datastore2] vmware_temp/78c683e2-4ef6-4ef7-801b-2110e198601b/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 721.404204] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Deleting the datastore file [datastore2] vmware_temp/78c683e2-4ef6-4ef7-801b-2110e198601b/d5497cf1-16e8-478a-a834-e9289b1751b7/tmp-sparse.vmdk {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 721.404204] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f82c484a-5e2f-4d50-bf93-b7d5e74260d6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.412425] env[62627]: DEBUG oslo_vmware.api [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Waiting for the task: (returnval){ [ 721.412425] env[62627]: value = "task-2193905" [ 721.412425] env[62627]: _type = "Task" [ 721.412425] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.423419] env[62627]: DEBUG oslo_vmware.api [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Task: {'id': task-2193905, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.652797] env[62627]: DEBUG nova.compute.manager [req-1d667597-f0d1-4778-b2f1-60b0474c46f0 req-d80a85c5-e762-4f63-90bb-b319be594e22 service nova] [instance: 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0] Received event network-changed-a1a51631-0dc2-427d-a041-f4c9f944ea3d {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 721.652797] env[62627]: DEBUG nova.compute.manager [req-1d667597-f0d1-4778-b2f1-60b0474c46f0 req-d80a85c5-e762-4f63-90bb-b319be594e22 service nova] [instance: 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0] Refreshing instance network info cache due to event network-changed-a1a51631-0dc2-427d-a041-f4c9f944ea3d. 
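The "Received event network-changed-..." messages show nova-compute handling an external instance event from Neutron: it takes the instance's refresh_cache lock, re-queries Neutron for the port, and rewrites the cached network_info (the "Updated VIF entry" line further down). A condensed version of that pattern using a plain threading lock, where fetch_nw_info is an assumed callback into Neutron, looks like:

import threading

_nw_cache = {}                     # instance_uuid -> network_info
_nw_cache_lock = threading.Lock()  # stand-in for the per-instance "refresh_cache-<uuid>" lock

def handle_network_changed(instance_uuid, fetch_nw_info):
    """Refresh one instance's cached network_info in response to a port event (sketch)."""
    with _nw_cache_lock:
        nw_info = fetch_nw_info(instance_uuid)   # call back into Neutron for fresh port data
        _nw_cache[instance_uuid] = nw_info
    return nw_info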
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 721.652797] env[62627]: DEBUG oslo_concurrency.lockutils [req-1d667597-f0d1-4778-b2f1-60b0474c46f0 req-d80a85c5-e762-4f63-90bb-b319be594e22 service nova] Acquiring lock "refresh_cache-5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 721.654444] env[62627]: DEBUG oslo_concurrency.lockutils [req-1d667597-f0d1-4778-b2f1-60b0474c46f0 req-d80a85c5-e762-4f63-90bb-b319be594e22 service nova] Acquired lock "refresh_cache-5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 721.654444] env[62627]: DEBUG nova.network.neutron [req-1d667597-f0d1-4778-b2f1-60b0474c46f0 req-d80a85c5-e762-4f63-90bb-b319be594e22 service nova] [instance: 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0] Refreshing network info cache for port a1a51631-0dc2-427d-a041-f4c9f944ea3d {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 721.754079] env[62627]: DEBUG oslo_concurrency.lockutils [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Acquiring lock "refresh_cache-f15982c3-23ed-4f25-bf55-3418cac117cc" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 721.754246] env[62627]: DEBUG oslo_concurrency.lockutils [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Acquired lock "refresh_cache-f15982c3-23ed-4f25-bf55-3418cac117cc" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 721.754454] env[62627]: DEBUG nova.network.neutron [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] [instance: f15982c3-23ed-4f25-bf55-3418cac117cc] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 721.864914] env[62627]: DEBUG nova.scheduler.client.report [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 721.923812] env[62627]: DEBUG oslo_vmware.api [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Task: {'id': task-2193905, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.026504} completed successfully. 
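The scheduler report client compares the inventory it just computed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with what it last sent to Placement and logs "Inventory has not changed" when the two match, avoiding a needless update call. The comparison is essentially dictionary equality over the per-resource-class entries shown above; the snippet below illustrates that, plus the effect of the allocation ratio (assumed names, not Nova's internals).

def inventory_changed(cached, computed):
    """True when the provider inventory differs and must be re-sent to Placement."""
    return cached != computed

vcpu = {"total": 48, "reserved": 0, "allocation_ratio": 4.0}
schedulable_vcpus = (vcpu["total"] - vcpu["reserved"]) * vcpu["allocation_ratio"]   # 192 vCPUs worth of claims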
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.924227] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 721.924498] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Moving file from [datastore2] vmware_temp/78c683e2-4ef6-4ef7-801b-2110e198601b/d5497cf1-16e8-478a-a834-e9289b1751b7 to [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7. {{(pid=62627) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 721.924806] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-78040748-e6a8-4f44-921a-5742353e011c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.934167] env[62627]: DEBUG oslo_vmware.api [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Waiting for the task: (returnval){ [ 721.934167] env[62627]: value = "task-2193906" [ 721.934167] env[62627]: _type = "Task" [ 721.934167] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.944384] env[62627]: DEBUG oslo_vmware.api [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Task: {'id': task-2193906, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.990644] env[62627]: DEBUG nova.compute.manager [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Start spawning the instance on the hypervisor. 
{{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 722.021342] env[62627]: DEBUG nova.virt.hardware [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 722.021342] env[62627]: DEBUG nova.virt.hardware [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 722.021342] env[62627]: DEBUG nova.virt.hardware [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 722.021342] env[62627]: DEBUG nova.virt.hardware [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 722.021852] env[62627]: DEBUG nova.virt.hardware [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 722.022319] env[62627]: DEBUG nova.virt.hardware [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 722.022781] env[62627]: DEBUG nova.virt.hardware [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 722.025663] env[62627]: DEBUG nova.virt.hardware [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 722.025663] env[62627]: DEBUG nova.virt.hardware [None req-69a0c14a-510b-417d-b71b-41bd6367b703 
tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 722.025663] env[62627]: DEBUG nova.virt.hardware [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 722.025663] env[62627]: DEBUG nova.virt.hardware [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 722.025663] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afcf916d-314e-4be7-98c0-c0b7ea580bff {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.035148] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6588803-79dd-4794-96f0-e8abcc388f44 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.371948] env[62627]: DEBUG oslo_concurrency.lockutils [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.412s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 722.373085] env[62627]: DEBUG nova.compute.manager [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] [instance: 1298559d-14fb-422c-8206-a35a747f0c06] Start building networks asynchronously for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 722.377576] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 10.055s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 722.378206] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 722.378206] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62627) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 722.379627] env[62627]: DEBUG oslo_concurrency.lockutils [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.179s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 722.380192] env[62627]: INFO nova.compute.claims [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 722.385136] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b6ff924-e96c-499e-bc7a-72b52d303029 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.398132] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-824dfa99-00f4-412b-8d17-2578f7e09e20 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.428982] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce6a6d15-027a-4874-8619-0a1bdef0b99f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.444126] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dc71880-65a2-4408-b89c-42f4381fc679 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.454841] env[62627]: DEBUG oslo_vmware.api [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Task: {'id': task-2193906, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.027471} completed successfully. 
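The lockutils lines above record both how long each caller waited to acquire "compute_resources" (7.179s, 10.055s) and how long it then held the lock (2.412s, 0.000s). The same bookkeeping can be reproduced with a stdlib context manager; this is a sketch using threading.Lock, whereas oslo.concurrency also provides fair and inter-process (external) locks.

import contextlib
import threading
import time

_locks = {}

@contextlib.contextmanager
def timed_lock(name):
    """Acquire a named lock and report waited/held durations, echoing the log format."""
    lock = _locks.setdefault(name, threading.Lock())
    start = time.monotonic()
    lock.acquire()
    acquired = time.monotonic()
    print('Lock "%s" acquired :: waited %.3fs' % (name, acquired - start))
    try:
        yield
    finally:
        held = time.monotonic() - acquired
        lock.release()
        print('Lock "%s" released :: held %.3fs' % (name, held))

# with timed_lock("compute_resources"):
#     ...claim or audit resources...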
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.456695] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] File moved {{(pid=62627) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 722.456992] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] [instance: 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0] Cleaning up location [datastore2] vmware_temp/78c683e2-4ef6-4ef7-801b-2110e198601b {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 722.457502] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Deleting the datastore file [datastore2] vmware_temp/78c683e2-4ef6-4ef7-801b-2110e198601b {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 722.457769] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-52f612a3-103d-4acc-993e-a729f172ea0a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.467195] env[62627]: DEBUG oslo_vmware.api [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Waiting for the task: (returnval){ [ 722.467195] env[62627]: value = "task-2193908" [ 722.467195] env[62627]: _type = "Task" [ 722.467195] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.468150] env[62627]: DEBUG nova.network.neutron [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] [instance: f15982c3-23ed-4f25-bf55-3418cac117cc] Instance cache missing network info. 
{{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 722.505918] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181237MB free_disk=178GB free_vcpus=48 pci_devices=None {{(pid=62627) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 722.506034] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 722.507156] env[62627]: DEBUG nova.network.neutron [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Successfully created port: a7d230b4-8ae5-4c90-8ccf-806a4056e883 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 722.521586] env[62627]: DEBUG oslo_vmware.api [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Task: {'id': task-2193908, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.642960] env[62627]: DEBUG nova.network.neutron [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Successfully updated port: 246fd9bd-81de-4162-b744-58c17169a80f {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 722.884689] env[62627]: DEBUG nova.compute.utils [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 722.886859] env[62627]: DEBUG nova.compute.manager [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] [instance: 1298559d-14fb-422c-8206-a35a747f0c06] Not allocating networking since 'none' was specified. {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 723.019194] env[62627]: DEBUG oslo_vmware.api [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Task: {'id': task-2193908, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.072635} completed successfully. 
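Taken together, the entries from "Preparing fetch location" through the MoveDatastoreFile and DeleteDatastoreFile tasks implement the fetch-image-if-missing pattern: download the Glance image into a unique vmware_temp directory, copy the sparse disk to its cached form, move the result into devstack-image-cache_base/<image id>, and delete the temporary files, while other requests needing the same image wait on the image lock. A local-filesystem analogue of that sequence is sketched below; it is illustrative only (the real code drives vCenter tasks against datastore paths, and `download` here is an assumed callback).

import os
import shutil
import uuid

def fetch_image_if_missing(cache_dir, image_id, download):
    """Populate <cache_dir>/<image_id>/<image_id>.vmdk once, via a throw-away temp dir."""
    final_dir = os.path.join(cache_dir, image_id)
    final_path = os.path.join(final_dir, image_id + ".vmdk")
    if os.path.exists(final_path):
        return final_path                                  # cache hit: nothing to fetch
    temp_root = os.path.join(cache_dir, "vmware_temp", str(uuid.uuid4()))
    temp_dir = os.path.join(temp_root, image_id)
    os.makedirs(temp_dir)
    sparse = os.path.join(temp_dir, "tmp-sparse.vmdk")
    download(sparse)                                       # "Downloading image file data ..."
    shutil.copyfile(sparse, os.path.join(temp_dir, image_id + ".vmdk"))  # CopyVirtualDisk_Task
    os.remove(sparse)                                      # DeleteDatastoreFile_Task (sparse copy)
    shutil.move(temp_dir, final_dir)                       # MoveDatastoreFile_Task into the cache
    shutil.rmtree(temp_root, ignore_errors=True)           # clean up vmware_temp/<uuid>
    return final_path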
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.019574] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 723.020375] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f8c9e285-1506-4705-86f6-a83b3952573d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.028545] env[62627]: DEBUG oslo_vmware.api [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Waiting for the task: (returnval){ [ 723.028545] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d5375d-e84b-8108-7644-7926032083e5" [ 723.028545] env[62627]: _type = "Task" [ 723.028545] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.040263] env[62627]: DEBUG oslo_vmware.api [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d5375d-e84b-8108-7644-7926032083e5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.111424] env[62627]: DEBUG nova.network.neutron [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] [instance: f15982c3-23ed-4f25-bf55-3418cac117cc] Updating instance_info_cache with network_info: [{"id": "db80642f-6b36-430d-badf-9ba7dacbbad6", "address": "fa:16:3e:6e:9c:d6", "network": {"id": "bd876f46-2bdf-4196-b718-c36f7e15f150", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.64", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ccb0cafa1a904d578ec4998b386bebf7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae1d6a8-cbba-4bbf-af10-ba5467340475", "external-id": "nsx-vlan-transportzone-271", "segmentation_id": 271, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdb80642f-6b", "ovs_interfaceid": "db80642f-6b36-430d-badf-9ba7dacbbad6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 723.128975] env[62627]: DEBUG nova.network.neutron [req-1d667597-f0d1-4778-b2f1-60b0474c46f0 req-d80a85c5-e762-4f63-90bb-b319be594e22 service nova] [instance: 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0] Updated VIF entry in instance network info cache for port a1a51631-0dc2-427d-a041-f4c9f944ea3d. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 723.128975] env[62627]: DEBUG nova.network.neutron [req-1d667597-f0d1-4778-b2f1-60b0474c46f0 req-d80a85c5-e762-4f63-90bb-b319be594e22 service nova] [instance: 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0] Updating instance_info_cache with network_info: [{"id": "a1a51631-0dc2-427d-a041-f4c9f944ea3d", "address": "fa:16:3e:b2:1d:31", "network": {"id": "bd876f46-2bdf-4196-b718-c36f7e15f150", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.162", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ccb0cafa1a904d578ec4998b386bebf7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae1d6a8-cbba-4bbf-af10-ba5467340475", "external-id": "nsx-vlan-transportzone-271", "segmentation_id": 271, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa1a51631-0d", "ovs_interfaceid": "a1a51631-0dc2-427d-a041-f4c9f944ea3d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 723.147750] env[62627]: DEBUG oslo_concurrency.lockutils [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Acquiring lock "refresh_cache-a674db15-ea4e-44c8-abc1-6af425286517" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 723.147750] env[62627]: DEBUG oslo_concurrency.lockutils [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Acquired lock "refresh_cache-a674db15-ea4e-44c8-abc1-6af425286517" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 723.147750] env[62627]: DEBUG nova.network.neutron [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 723.391335] env[62627]: DEBUG nova.compute.manager [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] [instance: 1298559d-14fb-422c-8206-a35a747f0c06] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 723.541529] env[62627]: DEBUG oslo_vmware.api [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d5375d-e84b-8108-7644-7926032083e5, 'name': SearchDatastore_Task, 'duration_secs': 0.010935} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.544690] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 723.544957] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0/5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 723.545459] env[62627]: DEBUG oslo_concurrency.lockutils [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 723.548130] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 723.548130] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fa46ece3-d54a-48a7-9d57-2ba1a2fcd4f2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.548130] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0af20564-53bf-4ede-bc15-2d2ef890b78b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.561462] env[62627]: DEBUG oslo_vmware.api [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Waiting for the task: (returnval){ [ 723.561462] env[62627]: value = "task-2193909" [ 723.561462] env[62627]: _type = "Task" [ 723.561462] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.567106] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 723.567106] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 723.571888] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5495ec1b-8ee8-4b6b-97b2-2d84099f64e7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.581942] env[62627]: DEBUG oslo_vmware.api [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Task: {'id': task-2193909, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.587077] env[62627]: DEBUG oslo_vmware.api [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Waiting for the task: (returnval){ [ 723.587077] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52793821-6a23-fc51-0efb-ae1d2a4adbf5" [ 723.587077] env[62627]: _type = "Task" [ 723.587077] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.602397] env[62627]: DEBUG oslo_vmware.api [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52793821-6a23-fc51-0efb-ae1d2a4adbf5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.609364] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a608d6b-b5de-404d-bd8f-ddb05bab217c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.614528] env[62627]: DEBUG oslo_concurrency.lockutils [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Releasing lock "refresh_cache-f15982c3-23ed-4f25-bf55-3418cac117cc" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 723.614847] env[62627]: DEBUG nova.compute.manager [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] [instance: f15982c3-23ed-4f25-bf55-3418cac117cc] Instance network_info: |[{"id": "db80642f-6b36-430d-badf-9ba7dacbbad6", "address": "fa:16:3e:6e:9c:d6", "network": {"id": "bd876f46-2bdf-4196-b718-c36f7e15f150", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.64", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ccb0cafa1a904d578ec4998b386bebf7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae1d6a8-cbba-4bbf-af10-ba5467340475", "external-id": "nsx-vlan-transportzone-271", "segmentation_id": 271, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tapdb80642f-6b", "ovs_interfaceid": "db80642f-6b36-430d-badf-9ba7dacbbad6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 723.616141] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] [instance: f15982c3-23ed-4f25-bf55-3418cac117cc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6e:9c:d6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cae1d6a8-cbba-4bbf-af10-ba5467340475', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'db80642f-6b36-430d-badf-9ba7dacbbad6', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 723.624404] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Creating folder: Project (8f2de9bc7c3a481ba6c90634dc99d148). Parent ref: group-v447541. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 723.627021] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0e981dd7-2a17-4d0f-b115-097b08f6aab5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.630777] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6217344c-aad4-44ac-8387-965b4de09da4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.635820] env[62627]: DEBUG oslo_concurrency.lockutils [req-1d667597-f0d1-4778-b2f1-60b0474c46f0 req-d80a85c5-e762-4f63-90bb-b319be594e22 service nova] Releasing lock "refresh_cache-5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 723.636615] env[62627]: DEBUG nova.compute.manager [req-1d667597-f0d1-4778-b2f1-60b0474c46f0 req-d80a85c5-e762-4f63-90bb-b319be594e22 service nova] [instance: 77418dff-6bdc-4e0a-954a-45c80960dbf1] Received event network-vif-plugged-79ffbb02-e48b-42f6-a1c3-8f9a38f02aa7 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 723.636716] env[62627]: DEBUG oslo_concurrency.lockutils [req-1d667597-f0d1-4778-b2f1-60b0474c46f0 req-d80a85c5-e762-4f63-90bb-b319be594e22 service nova] Acquiring lock "77418dff-6bdc-4e0a-954a-45c80960dbf1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 723.637273] env[62627]: DEBUG oslo_concurrency.lockutils [req-1d667597-f0d1-4778-b2f1-60b0474c46f0 req-d80a85c5-e762-4f63-90bb-b319be594e22 service nova] Lock "77418dff-6bdc-4e0a-954a-45c80960dbf1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 723.637456] env[62627]: DEBUG oslo_concurrency.lockutils [req-1d667597-f0d1-4778-b2f1-60b0474c46f0 
req-d80a85c5-e762-4f63-90bb-b319be594e22 service nova] Lock "77418dff-6bdc-4e0a-954a-45c80960dbf1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 723.637892] env[62627]: DEBUG nova.compute.manager [req-1d667597-f0d1-4778-b2f1-60b0474c46f0 req-d80a85c5-e762-4f63-90bb-b319be594e22 service nova] [instance: 77418dff-6bdc-4e0a-954a-45c80960dbf1] No waiting events found dispatching network-vif-plugged-79ffbb02-e48b-42f6-a1c3-8f9a38f02aa7 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 723.637892] env[62627]: WARNING nova.compute.manager [req-1d667597-f0d1-4778-b2f1-60b0474c46f0 req-d80a85c5-e762-4f63-90bb-b319be594e22 service nova] [instance: 77418dff-6bdc-4e0a-954a-45c80960dbf1] Received unexpected event network-vif-plugged-79ffbb02-e48b-42f6-a1c3-8f9a38f02aa7 for instance with vm_state building and task_state spawning. [ 723.638089] env[62627]: DEBUG nova.compute.manager [req-1d667597-f0d1-4778-b2f1-60b0474c46f0 req-d80a85c5-e762-4f63-90bb-b319be594e22 service nova] [instance: 77418dff-6bdc-4e0a-954a-45c80960dbf1] Received event network-changed-79ffbb02-e48b-42f6-a1c3-8f9a38f02aa7 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 723.638242] env[62627]: DEBUG nova.compute.manager [req-1d667597-f0d1-4778-b2f1-60b0474c46f0 req-d80a85c5-e762-4f63-90bb-b319be594e22 service nova] [instance: 77418dff-6bdc-4e0a-954a-45c80960dbf1] Refreshing instance network info cache due to event network-changed-79ffbb02-e48b-42f6-a1c3-8f9a38f02aa7. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 723.638650] env[62627]: DEBUG oslo_concurrency.lockutils [req-1d667597-f0d1-4778-b2f1-60b0474c46f0 req-d80a85c5-e762-4f63-90bb-b319be594e22 service nova] Acquiring lock "refresh_cache-77418dff-6bdc-4e0a-954a-45c80960dbf1" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 723.638812] env[62627]: DEBUG oslo_concurrency.lockutils [req-1d667597-f0d1-4778-b2f1-60b0474c46f0 req-d80a85c5-e762-4f63-90bb-b319be594e22 service nova] Acquired lock "refresh_cache-77418dff-6bdc-4e0a-954a-45c80960dbf1" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 723.639040] env[62627]: DEBUG nova.network.neutron [req-1d667597-f0d1-4778-b2f1-60b0474c46f0 req-d80a85c5-e762-4f63-90bb-b319be594e22 service nova] [instance: 77418dff-6bdc-4e0a-954a-45c80960dbf1] Refreshing network info cache for port 79ffbb02-e48b-42f6-a1c3-8f9a38f02aa7 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 723.676250] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02a29059-a203-4dfa-a529-a85a87025cfa {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.681608] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Created folder: Project (8f2de9bc7c3a481ba6c90634dc99d148) in parent group-v447541. 
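Annotation: the paired "Waiting for the task: (returnval){ ... }" and "Task: {...} progress is N%" entries above and below are produced by oslo.vmware's task poller (wait_for_task / _poll_task). A minimal sketch of the calling pattern, assuming a VMwareAPISession like the one created earlier in this log; the host, credentials and datastore paths are illustrative placeholders, not values from this deployment:

    from oslo_vmware import api as vmware_api

    # Placeholder connection values; the real ones come from nova.conf [vmware].
    session = vmware_api.VMwareAPISession(
        'vcenter.example.org', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # invoke_api issues the SOAP call (the "Invoking VirtualDiskManager.CopyVirtualDisk_Task"
    # lines) and returns a task moref; wait_for_task then polls it, emitting the
    # "_poll_task ... progress is N%" DEBUG lines until the task completes or fails.
    disk_mgr = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr,
                              sourceName='[datastore2] cache/image.vmdk',
                              destName='[datastore2] instance/instance.vmdk')
    session.wait_for_task(task)
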
[ 723.681810] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Creating folder: Instances. Parent ref: group-v447552. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 723.682813] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-27aae4a6-031b-4880-8923-62e48993b64f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.690328] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5bffcb7-7d21-4683-be96-3319cf1083f1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.696295] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Created folder: Instances in parent group-v447552. [ 723.696866] env[62627]: DEBUG oslo.service.loopingcall [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 723.696866] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f15982c3-23ed-4f25-bf55-3418cac117cc] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 723.697037] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-915aa884-d5f6-44f0-a009-5b175ac4b5a7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.720201] env[62627]: DEBUG nova.compute.provider_tree [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 723.729612] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 723.729612] env[62627]: value = "task-2193912" [ 723.729612] env[62627]: _type = "Task" [ 723.729612] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.741270] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2193912, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.897732] env[62627]: DEBUG nova.network.neutron [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Instance cache missing network info. 
{{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 724.087359] env[62627]: DEBUG oslo_vmware.api [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Task: {'id': task-2193909, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.113517] env[62627]: DEBUG oslo_vmware.api [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52793821-6a23-fc51-0efb-ae1d2a4adbf5, 'name': SearchDatastore_Task, 'duration_secs': 0.020593} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.114471] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-adf46052-2e27-4205-9d40-3cf3d446250e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.120543] env[62627]: DEBUG oslo_vmware.api [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Waiting for the task: (returnval){ [ 724.120543] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52298867-f40a-464a-38b4-18bc4edc8bc7" [ 724.120543] env[62627]: _type = "Task" [ 724.120543] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.131634] env[62627]: DEBUG oslo_vmware.api [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52298867-f40a-464a-38b4-18bc4edc8bc7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.223992] env[62627]: DEBUG nova.scheduler.client.report [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 724.247292] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2193912, 'name': CreateVM_Task} progress is 25%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.411112] env[62627]: DEBUG nova.compute.manager [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] [instance: 1298559d-14fb-422c-8206-a35a747f0c06] Start spawning the instance on the hypervisor. 
{{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 724.451895] env[62627]: DEBUG nova.virt.hardware [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 724.452633] env[62627]: DEBUG nova.virt.hardware [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 724.452633] env[62627]: DEBUG nova.virt.hardware [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 724.452633] env[62627]: DEBUG nova.virt.hardware [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 724.452728] env[62627]: DEBUG nova.virt.hardware [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 724.452880] env[62627]: DEBUG nova.virt.hardware [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 724.453111] env[62627]: DEBUG nova.virt.hardware [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 724.453278] env[62627]: DEBUG nova.virt.hardware [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
724.453441] env[62627]: DEBUG nova.virt.hardware [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 724.453622] env[62627]: DEBUG nova.virt.hardware [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 724.453820] env[62627]: DEBUG nova.virt.hardware [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 724.454767] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b75fee5-844a-4da9-aca3-376c46995ca6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.469301] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2d27ef5-ef13-4bb4-aab7-09615094b4d7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.489182] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] [instance: 1298559d-14fb-422c-8206-a35a747f0c06] Instance VIF info [] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 724.496318] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Creating folder: Project (de4e717c8a704d64b80b311f73da5cee). Parent ref: group-v447541. 
{{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 724.498064] env[62627]: DEBUG nova.network.neutron [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Updating instance_info_cache with network_info: [{"id": "246fd9bd-81de-4162-b744-58c17169a80f", "address": "fa:16:3e:9e:4a:5e", "network": {"id": "5d65934a-2966-4094-bc55-817726caaea5", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1246247596-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b12d6c9026644c9cb45d687b92161121", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap246fd9bd-81", "ovs_interfaceid": "246fd9bd-81de-4162-b744-58c17169a80f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 724.498670] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-daee9f7d-c41c-47e3-813d-187f69af9913 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.511698] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Created folder: Project (de4e717c8a704d64b80b311f73da5cee) in parent group-v447541. [ 724.513342] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Creating folder: Instances. Parent ref: group-v447555. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 724.513342] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b61d96b4-034c-4ac2-80d2-acaf80041417 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.526858] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Created folder: Instances in parent group-v447555. [ 724.527126] env[62627]: DEBUG oslo.service.loopingcall [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 724.528223] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1298559d-14fb-422c-8206-a35a747f0c06] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 724.528223] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-364abb65-a160-4335-966a-016ffa77f2c8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.546904] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 724.546904] env[62627]: value = "task-2193915" [ 724.546904] env[62627]: _type = "Task" [ 724.546904] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.560819] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2193915, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.578777] env[62627]: DEBUG oslo_vmware.api [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Task: {'id': task-2193909, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.526374} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.579670] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0/5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 724.579994] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] [instance: 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 724.580635] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0041ac36-f3b5-4fd7-b16e-c0b85863418f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.591317] env[62627]: DEBUG oslo_vmware.api [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Waiting for the task: (returnval){ [ 724.591317] env[62627]: value = "task-2193916" [ 724.591317] env[62627]: _type = "Task" [ 724.591317] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.602406] env[62627]: DEBUG oslo_vmware.api [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Task: {'id': task-2193916, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.633805] env[62627]: DEBUG oslo_vmware.api [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52298867-f40a-464a-38b4-18bc4edc8bc7, 'name': SearchDatastore_Task, 'duration_secs': 0.009434} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.634473] env[62627]: DEBUG oslo_concurrency.lockutils [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 724.634473] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 77418dff-6bdc-4e0a-954a-45c80960dbf1/77418dff-6bdc-4e0a-954a-45c80960dbf1.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 724.635063] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7aa0e72d-47f4-40b2-a7e7-819fb6ed8832 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.646902] env[62627]: DEBUG oslo_vmware.api [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Waiting for the task: (returnval){ [ 724.646902] env[62627]: value = "task-2193917" [ 724.646902] env[62627]: _type = "Task" [ 724.646902] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.655633] env[62627]: DEBUG oslo_vmware.api [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Task: {'id': task-2193917, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.729862] env[62627]: DEBUG oslo_concurrency.lockutils [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.351s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 724.732749] env[62627]: DEBUG nova.compute.manager [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Start building networks asynchronously for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 724.735308] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.707s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 724.739329] env[62627]: INFO nova.compute.claims [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: c1fd6afb-9332-446d-a441-5942b60edc94] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 724.751920] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2193912, 'name': CreateVM_Task, 'duration_secs': 0.802126} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.751920] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f15982c3-23ed-4f25-bf55-3418cac117cc] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 724.751920] env[62627]: DEBUG oslo_concurrency.lockutils [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 724.751920] env[62627]: DEBUG oslo_concurrency.lockutils [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 724.751920] env[62627]: DEBUG oslo_concurrency.lockutils [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 724.752153] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1debdd83-e7c1-4901-82fd-eb01faa13d65 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.757624] env[62627]: DEBUG oslo_vmware.api [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Waiting for the task: (returnval){ [ 724.757624] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5224b923-74f6-b075-5e20-051777757277" [ 724.757624] env[62627]: _type = "Task" [ 724.757624] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.769233] env[62627]: DEBUG oslo_vmware.api [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5224b923-74f6-b075-5e20-051777757277, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.002398] env[62627]: DEBUG oslo_concurrency.lockutils [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Releasing lock "refresh_cache-a674db15-ea4e-44c8-abc1-6af425286517" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 725.003066] env[62627]: DEBUG nova.compute.manager [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Instance network_info: |[{"id": "246fd9bd-81de-4162-b744-58c17169a80f", "address": "fa:16:3e:9e:4a:5e", "network": {"id": "5d65934a-2966-4094-bc55-817726caaea5", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1246247596-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b12d6c9026644c9cb45d687b92161121", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap246fd9bd-81", "ovs_interfaceid": "246fd9bd-81de-4162-b744-58c17169a80f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 725.003274] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9e:4a:5e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '43ad01d2-c7dd-453c-a929-8ad76294d13c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '246fd9bd-81de-4162-b744-58c17169a80f', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 725.013271] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Creating folder: Project (b12d6c9026644c9cb45d687b92161121). Parent ref: group-v447541. 
{{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 725.013668] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1d95e8ec-745f-4414-b7db-54893a2bbe06 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.026760] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Created folder: Project (b12d6c9026644c9cb45d687b92161121) in parent group-v447541. [ 725.027055] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Creating folder: Instances. Parent ref: group-v447558. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 725.028030] env[62627]: DEBUG nova.network.neutron [req-1d667597-f0d1-4778-b2f1-60b0474c46f0 req-d80a85c5-e762-4f63-90bb-b319be594e22 service nova] [instance: 77418dff-6bdc-4e0a-954a-45c80960dbf1] Updated VIF entry in instance network info cache for port 79ffbb02-e48b-42f6-a1c3-8f9a38f02aa7. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 725.028401] env[62627]: DEBUG nova.network.neutron [req-1d667597-f0d1-4778-b2f1-60b0474c46f0 req-d80a85c5-e762-4f63-90bb-b319be594e22 service nova] [instance: 77418dff-6bdc-4e0a-954a-45c80960dbf1] Updating instance_info_cache with network_info: [{"id": "79ffbb02-e48b-42f6-a1c3-8f9a38f02aa7", "address": "fa:16:3e:c1:30:1f", "network": {"id": "bd876f46-2bdf-4196-b718-c36f7e15f150", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.172", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ccb0cafa1a904d578ec4998b386bebf7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae1d6a8-cbba-4bbf-af10-ba5467340475", "external-id": "nsx-vlan-transportzone-271", "segmentation_id": 271, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap79ffbb02-e4", "ovs_interfaceid": "79ffbb02-e48b-42f6-a1c3-8f9a38f02aa7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 725.029849] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-14da07c6-b5cb-4141-aac1-27eb9e5b15cf {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.043360] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Created folder: Instances in parent group-v447558. 
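Annotation: the "Acquiring lock" / "Acquired lock" / "Releasing lock" triplets around the refresh_cache-* and devstack-image-cache entries, and the "acquired by ... waited" / "released by ... held" pairs on "compute_resources", come from oslo.concurrency's named-lock helpers. A minimal sketch of the two forms, with a lock name copied from this log and a purely illustrative body:

    from oslo_concurrency import lockutils

    # Context-manager form: logs from lockutils.py:310/313/331
    # ("Acquiring lock" / "Acquired lock" / "Releasing lock").
    with lockutils.lock('refresh_cache-a674db15-ea4e-44c8-abc1-6af425286517'):
        pass  # e.g. rebuild the instance's network info cache while holding the lock

    # Decorator form: logs from the inner wrapper at lockutils.py:402/407/421
    # ("acquired by ... waited" / "released by ... held"), as seen for "compute_resources".
    @lockutils.synchronized('compute_resources')
    def update_available_resource():
        pass  # runs with the named semaphore held
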
[ 725.043623] env[62627]: DEBUG oslo.service.loopingcall [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 725.044547] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 725.044762] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7216c19a-1287-4090-b396-d7dcee12383f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.075625] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2193915, 'name': CreateVM_Task, 'duration_secs': 0.423085} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.075625] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1298559d-14fb-422c-8206-a35a747f0c06] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 725.075625] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 725.075625] env[62627]: value = "task-2193920" [ 725.075625] env[62627]: _type = "Task" [ 725.075625] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.075625] env[62627]: DEBUG oslo_concurrency.lockutils [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 725.085537] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2193920, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.101647] env[62627]: DEBUG oslo_vmware.api [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Task: {'id': task-2193916, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074388} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.102131] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] [instance: 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 725.103435] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f0d145d-6bba-4ee1-b434-f908a5ac23f5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.139120] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] [instance: 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0] Reconfiguring VM instance instance-00000001 to attach disk [datastore2] 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0/5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 725.140481] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6ed6399a-d495-406c-8fe5-c03eb7387465 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.174463] env[62627]: DEBUG oslo_vmware.api [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Task: {'id': task-2193917, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.176262] env[62627]: DEBUG oslo_vmware.api [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Waiting for the task: (returnval){ [ 725.176262] env[62627]: value = "task-2193921" [ 725.176262] env[62627]: _type = "Task" [ 725.176262] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.195346] env[62627]: DEBUG oslo_vmware.api [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Task: {'id': task-2193921, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.244402] env[62627]: DEBUG nova.compute.utils [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 725.245978] env[62627]: DEBUG nova.compute.manager [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Allocating IP information in the background. 
{{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 725.246167] env[62627]: DEBUG nova.network.neutron [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 725.273317] env[62627]: DEBUG oslo_vmware.api [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5224b923-74f6-b075-5e20-051777757277, 'name': SearchDatastore_Task, 'duration_secs': 0.011299} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.273644] env[62627]: DEBUG oslo_concurrency.lockutils [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 725.273941] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] [instance: f15982c3-23ed-4f25-bf55-3418cac117cc] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 725.274246] env[62627]: DEBUG oslo_concurrency.lockutils [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 725.274449] env[62627]: DEBUG oslo_concurrency.lockutils [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 725.274681] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 725.275014] env[62627]: DEBUG oslo_concurrency.lockutils [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 725.275387] env[62627]: DEBUG oslo_concurrency.lockutils [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 
tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 725.275671] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8fb75ab6-95cd-4d92-9b6f-e9684760ec3b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.278820] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ff9007ad-1834-4c80-b9a9-493d6a5cc1ab {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.285282] env[62627]: DEBUG oslo_vmware.api [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Waiting for the task: (returnval){ [ 725.285282] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5286072b-d252-0e56-f2c4-3757ee5422ab" [ 725.285282] env[62627]: _type = "Task" [ 725.285282] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.290900] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 725.292064] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 725.292421] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5a6bb266-cfcf-48f0-856b-26b05c5b8452 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.298349] env[62627]: DEBUG oslo_vmware.api [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5286072b-d252-0e56-f2c4-3757ee5422ab, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.302067] env[62627]: DEBUG oslo_vmware.api [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Waiting for the task: (returnval){ [ 725.302067] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52e314d1-b3d2-75ad-2ecb-55413faaa657" [ 725.302067] env[62627]: _type = "Task" [ 725.302067] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.311482] env[62627]: DEBUG oslo_vmware.api [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52e314d1-b3d2-75ad-2ecb-55413faaa657, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.464116] env[62627]: DEBUG nova.policy [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c72c1ac25b9f4aeea2c5fb37afd37598', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '14238d8125674b63a7cf4452cb260283', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 725.492605] env[62627]: DEBUG oslo_concurrency.lockutils [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Acquiring lock "db8ddb7b-b119-415c-98d9-dc5b3d2868b7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 725.494336] env[62627]: DEBUG oslo_concurrency.lockutils [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Lock "db8ddb7b-b119-415c-98d9-dc5b3d2868b7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 725.497748] env[62627]: DEBUG nova.network.neutron [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Successfully updated port: a7d230b4-8ae5-4c90-8ccf-806a4056e883 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 725.533242] env[62627]: DEBUG oslo_concurrency.lockutils [req-1d667597-f0d1-4778-b2f1-60b0474c46f0 req-d80a85c5-e762-4f63-90bb-b319be594e22 service nova] Releasing lock "refresh_cache-77418dff-6bdc-4e0a-954a-45c80960dbf1" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 725.588498] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2193920, 'name': CreateVM_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.675187] env[62627]: DEBUG oslo_vmware.api [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Task: {'id': task-2193917, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.576635} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.675458] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 77418dff-6bdc-4e0a-954a-45c80960dbf1/77418dff-6bdc-4e0a-954a-45c80960dbf1.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 725.675664] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: 77418dff-6bdc-4e0a-954a-45c80960dbf1] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 725.675914] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-48c130c7-e895-4473-ae1a-2f42e5e4ae62 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.687327] env[62627]: DEBUG oslo_vmware.api [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Task: {'id': task-2193921, 'name': ReconfigVM_Task, 'duration_secs': 0.312192} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.688713] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] [instance: 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0] Reconfigured VM instance instance-00000001 to attach disk [datastore2] 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0/5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 725.691896] env[62627]: DEBUG oslo_vmware.api [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Waiting for the task: (returnval){ [ 725.691896] env[62627]: value = "task-2193922" [ 725.691896] env[62627]: _type = "Task" [ 725.691896] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.691896] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-685afd13-9e9f-4d1d-afe1-e611793a889b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.701206] env[62627]: DEBUG oslo_vmware.api [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Task: {'id': task-2193922, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.703165] env[62627]: DEBUG oslo_vmware.api [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Waiting for the task: (returnval){ [ 725.703165] env[62627]: value = "task-2193923" [ 725.703165] env[62627]: _type = "Task" [ 725.703165] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.713124] env[62627]: DEBUG oslo_vmware.api [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Task: {'id': task-2193923, 'name': Rename_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.751929] env[62627]: DEBUG nova.compute.manager [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 725.801482] env[62627]: DEBUG oslo_vmware.api [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5286072b-d252-0e56-f2c4-3757ee5422ab, 'name': SearchDatastore_Task, 'duration_secs': 0.019426} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.801689] env[62627]: DEBUG oslo_concurrency.lockutils [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 725.802034] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] [instance: 1298559d-14fb-422c-8206-a35a747f0c06] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 725.802159] env[62627]: DEBUG oslo_concurrency.lockutils [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 725.815089] env[62627]: DEBUG oslo_vmware.api [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52e314d1-b3d2-75ad-2ecb-55413faaa657, 'name': SearchDatastore_Task, 'duration_secs': 0.018023} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.816075] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ec81e1c-8f30-4dba-bc6a-2396eeba8a49 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.822525] env[62627]: DEBUG oslo_vmware.api [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Waiting for the task: (returnval){ [ 725.822525] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5225f07d-f834-9c8d-167e-335f4e845bbd" [ 725.822525] env[62627]: _type = "Task" [ 725.822525] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.841132] env[62627]: DEBUG oslo_vmware.api [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5225f07d-f834-9c8d-167e-335f4e845bbd, 'name': SearchDatastore_Task} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.841632] env[62627]: DEBUG oslo_concurrency.lockutils [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 725.842197] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] f15982c3-23ed-4f25-bf55-3418cac117cc/f15982c3-23ed-4f25-bf55-3418cac117cc.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 725.842716] env[62627]: DEBUG oslo_concurrency.lockutils [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 725.843299] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 725.843730] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ff6ee44c-0175-4949-89e9-64ea589a2936 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.849858] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory 
with opID=oslo.vmware-3c819ee2-aeb1-4068-adfb-9b9e6ee4bd6b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.861078] env[62627]: DEBUG oslo_vmware.api [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Waiting for the task: (returnval){ [ 725.861078] env[62627]: value = "task-2193924" [ 725.861078] env[62627]: _type = "Task" [ 725.861078] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.861274] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 725.861430] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 725.865310] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7dfde14e-e13a-4063-ac96-aec0f6c67d58 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.876668] env[62627]: DEBUG oslo_vmware.api [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Task: {'id': task-2193924, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.882688] env[62627]: DEBUG oslo_vmware.api [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Waiting for the task: (returnval){ [ 725.882688] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]522ba1e4-e839-be44-c324-ecec47db945e" [ 725.882688] env[62627]: _type = "Task" [ 725.882688] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.895873] env[62627]: DEBUG oslo_vmware.api [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]522ba1e4-e839-be44-c324-ecec47db945e, 'name': SearchDatastore_Task, 'duration_secs': 0.017993} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.896685] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6db26657-a3a3-4df3-8a05-975fffdef922 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.906615] env[62627]: DEBUG oslo_vmware.api [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Waiting for the task: (returnval){ [ 725.906615] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52079003-c7e8-c792-4c1b-3543b1325b9c" [ 725.906615] env[62627]: _type = "Task" [ 725.906615] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.917060] env[62627]: DEBUG oslo_vmware.api [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52079003-c7e8-c792-4c1b-3543b1325b9c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.942930] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Acquiring lock "33bf3932-3746-445b-b0fe-611466cdd9b1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 725.943299] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Lock "33bf3932-3746-445b-b0fe-611466cdd9b1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 725.996392] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Acquiring lock "e8c87637-4a30-4482-af21-f58ad8a218fc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 725.996743] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Lock "e8c87637-4a30-4482-af21-f58ad8a218fc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 726.005393] env[62627]: DEBUG nova.compute.manager [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Starting instance... 
{{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 726.009705] env[62627]: DEBUG oslo_concurrency.lockutils [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Acquiring lock "refresh_cache-ab999825-4310-4fd8-81f0-6bb5dde4175b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 726.009859] env[62627]: DEBUG oslo_concurrency.lockutils [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Acquired lock "refresh_cache-ab999825-4310-4fd8-81f0-6bb5dde4175b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 726.010339] env[62627]: DEBUG nova.network.neutron [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 726.057246] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c83b768-4e87-4fdc-88e7-4043d4b44f14 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.068495] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1d4dd06-2b31-406f-89ce-71b8f7a16bf5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.106259] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea71cdb8-9410-43a0-8067-fc5fe61c124f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.118751] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2193920, 'name': CreateVM_Task, 'duration_secs': 0.538844} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.121165] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 726.122545] env[62627]: DEBUG oslo_concurrency.lockutils [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 726.122545] env[62627]: DEBUG oslo_concurrency.lockutils [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 726.122738] env[62627]: DEBUG oslo_concurrency.lockutils [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 726.124116] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cff6953e-d2f6-4532-8396-f83ff9338491 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.128032] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7d9d0a0f-cdba-4e67-be12-f9e56da208be {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.143505] env[62627]: DEBUG nova.compute.provider_tree [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Updating inventory in ProviderTree for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 726.147446] env[62627]: DEBUG oslo_vmware.api [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Waiting for the task: (returnval){ [ 726.147446] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]520fa9b5-d61c-cb9e-0812-0e1cf0dbddae" [ 726.147446] env[62627]: _type = "Task" [ 726.147446] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.149819] env[62627]: DEBUG nova.compute.manager [req-68bfab93-e2e5-42e0-b2c8-b68832ed6c48 req-9a688e62-4236-4f78-b4ed-34643eaa97e2 service nova] [instance: f15982c3-23ed-4f25-bf55-3418cac117cc] Received event network-vif-plugged-db80642f-6b36-430d-badf-9ba7dacbbad6 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 726.149819] env[62627]: DEBUG oslo_concurrency.lockutils [req-68bfab93-e2e5-42e0-b2c8-b68832ed6c48 req-9a688e62-4236-4f78-b4ed-34643eaa97e2 service nova] Acquiring lock "f15982c3-23ed-4f25-bf55-3418cac117cc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 726.149997] env[62627]: DEBUG oslo_concurrency.lockutils [req-68bfab93-e2e5-42e0-b2c8-b68832ed6c48 req-9a688e62-4236-4f78-b4ed-34643eaa97e2 service nova] Lock "f15982c3-23ed-4f25-bf55-3418cac117cc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 726.150186] env[62627]: DEBUG oslo_concurrency.lockutils [req-68bfab93-e2e5-42e0-b2c8-b68832ed6c48 req-9a688e62-4236-4f78-b4ed-34643eaa97e2 service nova] Lock "f15982c3-23ed-4f25-bf55-3418cac117cc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 726.150355] env[62627]: DEBUG nova.compute.manager [req-68bfab93-e2e5-42e0-b2c8-b68832ed6c48 req-9a688e62-4236-4f78-b4ed-34643eaa97e2 service nova] [instance: f15982c3-23ed-4f25-bf55-3418cac117cc] No waiting events found dispatching network-vif-plugged-db80642f-6b36-430d-badf-9ba7dacbbad6 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 726.150545] env[62627]: WARNING nova.compute.manager [req-68bfab93-e2e5-42e0-b2c8-b68832ed6c48 req-9a688e62-4236-4f78-b4ed-34643eaa97e2 service nova] [instance: f15982c3-23ed-4f25-bf55-3418cac117cc] Received unexpected event network-vif-plugged-db80642f-6b36-430d-badf-9ba7dacbbad6 for instance with vm_state building and task_state spawning. [ 726.150692] env[62627]: DEBUG nova.compute.manager [req-68bfab93-e2e5-42e0-b2c8-b68832ed6c48 req-9a688e62-4236-4f78-b4ed-34643eaa97e2 service nova] [instance: f15982c3-23ed-4f25-bf55-3418cac117cc] Received event network-changed-db80642f-6b36-430d-badf-9ba7dacbbad6 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 726.150844] env[62627]: DEBUG nova.compute.manager [req-68bfab93-e2e5-42e0-b2c8-b68832ed6c48 req-9a688e62-4236-4f78-b4ed-34643eaa97e2 service nova] [instance: f15982c3-23ed-4f25-bf55-3418cac117cc] Refreshing instance network info cache due to event network-changed-db80642f-6b36-430d-badf-9ba7dacbbad6. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 726.151036] env[62627]: DEBUG oslo_concurrency.lockutils [req-68bfab93-e2e5-42e0-b2c8-b68832ed6c48 req-9a688e62-4236-4f78-b4ed-34643eaa97e2 service nova] Acquiring lock "refresh_cache-f15982c3-23ed-4f25-bf55-3418cac117cc" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 726.151167] env[62627]: DEBUG oslo_concurrency.lockutils [req-68bfab93-e2e5-42e0-b2c8-b68832ed6c48 req-9a688e62-4236-4f78-b4ed-34643eaa97e2 service nova] Acquired lock "refresh_cache-f15982c3-23ed-4f25-bf55-3418cac117cc" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 726.151412] env[62627]: DEBUG nova.network.neutron [req-68bfab93-e2e5-42e0-b2c8-b68832ed6c48 req-9a688e62-4236-4f78-b4ed-34643eaa97e2 service nova] [instance: f15982c3-23ed-4f25-bf55-3418cac117cc] Refreshing network info cache for port db80642f-6b36-430d-badf-9ba7dacbbad6 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 726.163370] env[62627]: DEBUG oslo_vmware.api [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]520fa9b5-d61c-cb9e-0812-0e1cf0dbddae, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.203483] env[62627]: DEBUG oslo_vmware.api [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Task: {'id': task-2193922, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071965} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.204254] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: 77418dff-6bdc-4e0a-954a-45c80960dbf1] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 726.210450] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20ebb042-ac12-4ce6-9c87-22e6d1563406 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.232653] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: 77418dff-6bdc-4e0a-954a-45c80960dbf1] Reconfiguring VM instance instance-00000002 to attach disk [datastore2] 77418dff-6bdc-4e0a-954a-45c80960dbf1/77418dff-6bdc-4e0a-954a-45c80960dbf1.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 726.236212] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d02b646c-217c-43f8-85f2-b978daedb629 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.251023] env[62627]: DEBUG oslo_vmware.api [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Task: {'id': task-2193923, 'name': Rename_Task, 'duration_secs': 0.150636} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.251216] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] [instance: 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 726.251846] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-96e3c0ed-ef2f-4865-9106-9c9e8bb7e0c7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.258443] env[62627]: DEBUG oslo_vmware.api [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Waiting for the task: (returnval){ [ 726.258443] env[62627]: value = "task-2193926" [ 726.258443] env[62627]: _type = "Task" [ 726.258443] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.259985] env[62627]: DEBUG oslo_vmware.api [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Waiting for the task: (returnval){ [ 726.259985] env[62627]: value = "task-2193927" [ 726.259985] env[62627]: _type = "Task" [ 726.259985] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.278783] env[62627]: DEBUG oslo_vmware.api [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Task: {'id': task-2193926, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.281806] env[62627]: DEBUG oslo_vmware.api [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Task: {'id': task-2193927, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.374996] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Acquiring lock "63e884c4-2cc1-431d-8a7f-1c4259b29e1a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 726.375466] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Lock "63e884c4-2cc1-431d-8a7f-1c4259b29e1a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 726.375466] env[62627]: DEBUG oslo_vmware.api [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Task: {'id': task-2193924, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.422446] env[62627]: DEBUG oslo_vmware.api [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52079003-c7e8-c792-4c1b-3543b1325b9c, 'name': SearchDatastore_Task, 'duration_secs': 0.012171} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.422446] env[62627]: DEBUG oslo_concurrency.lockutils [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 726.422725] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 1298559d-14fb-422c-8206-a35a747f0c06/1298559d-14fb-422c-8206-a35a747f0c06.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 726.423389] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-42872a3b-b41e-4819-8a79-086542299ab8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.434475] env[62627]: DEBUG oslo_vmware.api [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Waiting for the task: (returnval){ [ 726.434475] env[62627]: value = "task-2193928" [ 726.434475] env[62627]: _type = "Task" [ 726.434475] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.450426] env[62627]: DEBUG oslo_vmware.api [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Task: {'id': task-2193928, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.539585] env[62627]: DEBUG oslo_concurrency.lockutils [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 726.615218] env[62627]: DEBUG nova.network.neutron [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 726.673132] env[62627]: DEBUG oslo_vmware.api [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]520fa9b5-d61c-cb9e-0812-0e1cf0dbddae, 'name': SearchDatastore_Task, 'duration_secs': 0.07538} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.673132] env[62627]: DEBUG oslo_concurrency.lockutils [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 726.673132] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 726.673132] env[62627]: DEBUG oslo_concurrency.lockutils [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 726.673340] env[62627]: DEBUG oslo_concurrency.lockutils [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 726.673473] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 726.673777] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a41840d2-76b5-4c2c-aed0-8da0a3f425df {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.677463] env[62627]: ERROR nova.scheduler.client.report [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [req-73508957-c799-4959-ad57-eaca7ae24e3d] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c17e7f32-68d8-4e2f-8979-15d701de02f7. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-73508957-c799-4959-ad57-eaca7ae24e3d"}]} [ 726.699080] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 726.699294] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 726.700964] env[62627]: DEBUG nova.scheduler.client.report [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Refreshing inventories for resource provider c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 726.703904] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22cb3625-f2e2-4d08-ad48-fe8abbb1f831 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.711781] env[62627]: DEBUG oslo_vmware.api [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Waiting for the task: (returnval){ [ 726.711781] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d9b2f3-2530-1bed-669d-fb8fc5f36d88" [ 726.711781] env[62627]: _type = "Task" [ 726.711781] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.722586] env[62627]: DEBUG oslo_vmware.api [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d9b2f3-2530-1bed-669d-fb8fc5f36d88, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.735471] env[62627]: DEBUG nova.scheduler.client.report [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Updating ProviderTree inventory for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 726.735719] env[62627]: DEBUG nova.compute.provider_tree [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Updating inventory in ProviderTree for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 726.754313] env[62627]: DEBUG nova.scheduler.client.report [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Refreshing aggregate associations for resource provider c17e7f32-68d8-4e2f-8979-15d701de02f7, aggregates: None {{(pid=62627) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 726.777225] env[62627]: DEBUG nova.compute.manager [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Start spawning the instance on the hypervisor. {{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 726.779997] env[62627]: DEBUG oslo_vmware.api [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Task: {'id': task-2193926, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.789056] env[62627]: DEBUG oslo_vmware.api [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Task: {'id': task-2193927, 'name': PowerOnVM_Task} progress is 94%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.790469] env[62627]: DEBUG nova.scheduler.client.report [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Refreshing trait associations for resource provider c17e7f32-68d8-4e2f-8979-15d701de02f7, traits: COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62627) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 726.817780] env[62627]: DEBUG nova.virt.hardware [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 726.817866] env[62627]: DEBUG nova.virt.hardware [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 726.818378] env[62627]: DEBUG nova.virt.hardware [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 726.818378] env[62627]: DEBUG nova.virt.hardware [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 726.818487] env[62627]: DEBUG nova.virt.hardware [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 726.818970] env[62627]: DEBUG nova.virt.hardware [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 726.818970] env[62627]: DEBUG nova.virt.hardware [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 726.819107] env[62627]: DEBUG nova.virt.hardware [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 726.819200] env[62627]: DEBUG nova.virt.hardware [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 726.819364] env[62627]: DEBUG nova.virt.hardware [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 726.819532] env[62627]: DEBUG nova.virt.hardware [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 726.820527] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95b3b0a9-1a45-43b3-a125-fb873f52c177 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.833285] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92cdd70a-23ed-4656-a146-e81dbc88cb6a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.874049] env[62627]: DEBUG oslo_vmware.api [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Task: {'id': task-2193924, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.627288} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.882181] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] f15982c3-23ed-4f25-bf55-3418cac117cc/f15982c3-23ed-4f25-bf55-3418cac117cc.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 726.882181] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] [instance: f15982c3-23ed-4f25-bf55-3418cac117cc] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 726.882672] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6715e63f-8f2e-429b-a1c0-68b344b11597 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.898337] env[62627]: DEBUG oslo_vmware.api [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Waiting for the task: (returnval){ [ 726.898337] env[62627]: value = "task-2193929" [ 726.898337] env[62627]: _type = "Task" [ 726.898337] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.914209] env[62627]: DEBUG oslo_vmware.api [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Task: {'id': task-2193929, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.945271] env[62627]: DEBUG oslo_vmware.api [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Task: {'id': task-2193928, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.104328] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22dc5ba1-a4df-4e4d-a041-2eec06904efd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.115858] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1d9aa6b-c1fa-4c62-b125-dc0672221402 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.152752] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-634a1ffb-8168-4f35-a01a-f4ad38a7af6e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.163818] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c73bac8a-ec7e-4afd-9fbf-8bbe5acecd3f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.181143] env[62627]: DEBUG nova.compute.provider_tree [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Updating inventory in ProviderTree for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 727.231374] env[62627]: DEBUG oslo_vmware.api [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d9b2f3-2530-1bed-669d-fb8fc5f36d88, 'name': SearchDatastore_Task, 'duration_secs': 0.056917} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.232791] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d80a995e-25e4-4870-a3ff-9284edadd76c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.240666] env[62627]: DEBUG oslo_vmware.api [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Waiting for the task: (returnval){ [ 727.240666] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5207bf9e-08f8-3cc1-b719-268aed56d0d5" [ 727.240666] env[62627]: _type = "Task" [ 727.240666] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.252077] env[62627]: DEBUG oslo_vmware.api [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5207bf9e-08f8-3cc1-b719-268aed56d0d5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.255739] env[62627]: DEBUG nova.network.neutron [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Updating instance_info_cache with network_info: [{"id": "a7d230b4-8ae5-4c90-8ccf-806a4056e883", "address": "fa:16:3e:4c:84:35", "network": {"id": "bd876f46-2bdf-4196-b718-c36f7e15f150", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.153", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ccb0cafa1a904d578ec4998b386bebf7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae1d6a8-cbba-4bbf-af10-ba5467340475", "external-id": "nsx-vlan-transportzone-271", "segmentation_id": 271, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7d230b4-8a", "ovs_interfaceid": "a7d230b4-8ae5-4c90-8ccf-806a4056e883", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 727.275464] env[62627]: DEBUG oslo_vmware.api [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Task: {'id': task-2193926, 'name': ReconfigVM_Task, 'duration_secs': 0.69177} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.279964] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: 77418dff-6bdc-4e0a-954a-45c80960dbf1] Reconfigured VM instance instance-00000002 to attach disk [datastore2] 77418dff-6bdc-4e0a-954a-45c80960dbf1/77418dff-6bdc-4e0a-954a-45c80960dbf1.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 727.280880] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-128db553-3616-4fb9-b61c-29a1dbeb1d26 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.292412] env[62627]: DEBUG oslo_vmware.api [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Task: {'id': task-2193927, 'name': PowerOnVM_Task, 'duration_secs': 0.658699} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.294011] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] [instance: 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 727.294476] env[62627]: INFO nova.compute.manager [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] [instance: 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0] Took 14.44 seconds to spawn the instance on the hypervisor. [ 727.294729] env[62627]: DEBUG nova.compute.manager [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] [instance: 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 727.295389] env[62627]: DEBUG oslo_vmware.api [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Waiting for the task: (returnval){ [ 727.295389] env[62627]: value = "task-2193930" [ 727.295389] env[62627]: _type = "Task" [ 727.295389] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.296350] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f8e936e-90dc-4964-92f2-2deea1a6c781 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.311281] env[62627]: DEBUG oslo_vmware.api [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Task: {'id': task-2193930, 'name': Rename_Task} progress is 10%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.376253] env[62627]: DEBUG nova.network.neutron [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Successfully created port: 81a799c8-db8c-4bae-8609-d4bf5b56c600 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 727.411674] env[62627]: DEBUG oslo_vmware.api [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Task: {'id': task-2193929, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.124425} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.411963] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] [instance: f15982c3-23ed-4f25-bf55-3418cac117cc] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 727.413788] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22868959-5468-405b-bf42-2af075abe64e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.442810] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] [instance: f15982c3-23ed-4f25-bf55-3418cac117cc] Reconfiguring VM instance instance-00000003 to attach disk [datastore2] f15982c3-23ed-4f25-bf55-3418cac117cc/f15982c3-23ed-4f25-bf55-3418cac117cc.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 727.445575] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5d6eec76-6742-4126-b749-d27612a388c3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.470739] env[62627]: DEBUG oslo_vmware.api [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Task: {'id': task-2193928, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.566492} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.471519] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 1298559d-14fb-422c-8206-a35a747f0c06/1298559d-14fb-422c-8206-a35a747f0c06.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 727.471758] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] [instance: 1298559d-14fb-422c-8206-a35a747f0c06] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 727.472062] env[62627]: DEBUG oslo_vmware.api [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Waiting for the task: (returnval){ [ 727.472062] env[62627]: value = "task-2193931" [ 727.472062] env[62627]: _type = "Task" [ 727.472062] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.472361] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a8155f69-d05c-4b96-94ab-6b80b599674a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.483931] env[62627]: DEBUG oslo_vmware.api [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Task: {'id': task-2193931, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.485349] env[62627]: DEBUG oslo_vmware.api [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Waiting for the task: (returnval){ [ 727.485349] env[62627]: value = "task-2193932" [ 727.485349] env[62627]: _type = "Task" [ 727.485349] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.501560] env[62627]: DEBUG oslo_vmware.api [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Task: {'id': task-2193932, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.589988] env[62627]: DEBUG nova.network.neutron [req-68bfab93-e2e5-42e0-b2c8-b68832ed6c48 req-9a688e62-4236-4f78-b4ed-34643eaa97e2 service nova] [instance: f15982c3-23ed-4f25-bf55-3418cac117cc] Updated VIF entry in instance network info cache for port db80642f-6b36-430d-badf-9ba7dacbbad6. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 727.591029] env[62627]: DEBUG nova.network.neutron [req-68bfab93-e2e5-42e0-b2c8-b68832ed6c48 req-9a688e62-4236-4f78-b4ed-34643eaa97e2 service nova] [instance: f15982c3-23ed-4f25-bf55-3418cac117cc] Updating instance_info_cache with network_info: [{"id": "db80642f-6b36-430d-badf-9ba7dacbbad6", "address": "fa:16:3e:6e:9c:d6", "network": {"id": "bd876f46-2bdf-4196-b718-c36f7e15f150", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.64", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ccb0cafa1a904d578ec4998b386bebf7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae1d6a8-cbba-4bbf-af10-ba5467340475", "external-id": "nsx-vlan-transportzone-271", "segmentation_id": 271, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdb80642f-6b", "ovs_interfaceid": "db80642f-6b36-430d-badf-9ba7dacbbad6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 727.737212] env[62627]: DEBUG nova.scheduler.client.report [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Updated inventory for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with generation 17 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 727.737212] env[62627]: DEBUG nova.compute.provider_tree [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Updating resource provider c17e7f32-68d8-4e2f-8979-15d701de02f7 generation from 17 to 18 during operation: update_inventory {{(pid=62627) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 727.737212] env[62627]: DEBUG nova.compute.provider_tree [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Updating inventory in ProviderTree for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 727.762345] env[62627]: DEBUG 
oslo_concurrency.lockutils [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Releasing lock "refresh_cache-ab999825-4310-4fd8-81f0-6bb5dde4175b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 727.762669] env[62627]: DEBUG nova.compute.manager [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Instance network_info: |[{"id": "a7d230b4-8ae5-4c90-8ccf-806a4056e883", "address": "fa:16:3e:4c:84:35", "network": {"id": "bd876f46-2bdf-4196-b718-c36f7e15f150", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.153", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ccb0cafa1a904d578ec4998b386bebf7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae1d6a8-cbba-4bbf-af10-ba5467340475", "external-id": "nsx-vlan-transportzone-271", "segmentation_id": 271, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7d230b4-8a", "ovs_interfaceid": "a7d230b4-8ae5-4c90-8ccf-806a4056e883", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 727.763317] env[62627]: DEBUG oslo_vmware.api [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5207bf9e-08f8-3cc1-b719-268aed56d0d5, 'name': SearchDatastore_Task, 'duration_secs': 0.009285} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.763402] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4c:84:35', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cae1d6a8-cbba-4bbf-af10-ba5467340475', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a7d230b4-8ae5-4c90-8ccf-806a4056e883', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 727.776889] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Creating folder: Project (e4db2144e94840c4a422d2d933b4a65f). Parent ref: group-v447541. 
{{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 727.777293] env[62627]: DEBUG oslo_concurrency.lockutils [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 727.777541] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] a674db15-ea4e-44c8-abc1-6af425286517/a674db15-ea4e-44c8-abc1-6af425286517.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 727.778457] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b72b50ea-5e6c-416e-b021-7be48e8004fd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.780414] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-da9f2bf1-b277-4572-b70c-499f081b249f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.793437] env[62627]: DEBUG oslo_vmware.api [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Waiting for the task: (returnval){ [ 727.793437] env[62627]: value = "task-2193934" [ 727.793437] env[62627]: _type = "Task" [ 727.793437] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.799904] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Created folder: Project (e4db2144e94840c4a422d2d933b4a65f) in parent group-v447541. [ 727.800060] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Creating folder: Instances. Parent ref: group-v447561. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 727.804911] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-23b0ddd2-89f2-4585-98e3-ad1d90d67bb7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.810948] env[62627]: DEBUG oslo_vmware.api [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Task: {'id': task-2193934, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.822540] env[62627]: DEBUG oslo_vmware.api [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Task: {'id': task-2193930, 'name': Rename_Task, 'duration_secs': 0.156528} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.824822] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: 77418dff-6bdc-4e0a-954a-45c80960dbf1] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 727.825197] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dcd65cb5-a532-468e-8d37-7fdef95bbf40 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.835139] env[62627]: INFO nova.compute.manager [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] [instance: 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0] Took 19.24 seconds to build instance. [ 727.842381] env[62627]: DEBUG oslo_vmware.api [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Waiting for the task: (returnval){ [ 727.842381] env[62627]: value = "task-2193936" [ 727.842381] env[62627]: _type = "Task" [ 727.842381] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.847594] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Created folder: Instances in parent group-v447561. [ 727.847898] env[62627]: DEBUG oslo.service.loopingcall [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 727.848529] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 727.848765] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c7502fa9-9f95-4260-a1da-0e61ce61833c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.867971] env[62627]: DEBUG oslo_vmware.api [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Task: {'id': task-2193936, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.874560] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 727.874560] env[62627]: value = "task-2193937" [ 727.874560] env[62627]: _type = "Task" [ 727.874560] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.884743] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2193937, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.986045] env[62627]: DEBUG oslo_vmware.api [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Task: {'id': task-2193931, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.997455] env[62627]: DEBUG oslo_vmware.api [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Task: {'id': task-2193932, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068626} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.997902] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] [instance: 1298559d-14fb-422c-8206-a35a747f0c06] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 727.998903] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caf22f30-f228-44d1-a681-ef3df17d269a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.021992] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] [instance: 1298559d-14fb-422c-8206-a35a747f0c06] Reconfiguring VM instance instance-00000006 to attach disk [datastore2] 1298559d-14fb-422c-8206-a35a747f0c06/1298559d-14fb-422c-8206-a35a747f0c06.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 728.022331] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d161196f-af63-408f-a755-ef6a9763af9d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.047038] env[62627]: DEBUG oslo_vmware.api [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Waiting for the task: (returnval){ [ 728.047038] env[62627]: value = "task-2193938" [ 728.047038] env[62627]: _type = "Task" [ 728.047038] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.062037] env[62627]: DEBUG oslo_vmware.api [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Task: {'id': task-2193938, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.098260] env[62627]: DEBUG oslo_concurrency.lockutils [req-68bfab93-e2e5-42e0-b2c8-b68832ed6c48 req-9a688e62-4236-4f78-b4ed-34643eaa97e2 service nova] Releasing lock "refresh_cache-f15982c3-23ed-4f25-bf55-3418cac117cc" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 728.100373] env[62627]: DEBUG nova.compute.manager [req-68bfab93-e2e5-42e0-b2c8-b68832ed6c48 req-9a688e62-4236-4f78-b4ed-34643eaa97e2 service nova] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Received event network-vif-plugged-246fd9bd-81de-4162-b744-58c17169a80f {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 728.100373] env[62627]: DEBUG oslo_concurrency.lockutils [req-68bfab93-e2e5-42e0-b2c8-b68832ed6c48 req-9a688e62-4236-4f78-b4ed-34643eaa97e2 service nova] Acquiring lock "a674db15-ea4e-44c8-abc1-6af425286517-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 728.100489] env[62627]: DEBUG oslo_concurrency.lockutils [req-68bfab93-e2e5-42e0-b2c8-b68832ed6c48 req-9a688e62-4236-4f78-b4ed-34643eaa97e2 service nova] Lock "a674db15-ea4e-44c8-abc1-6af425286517-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 728.100959] env[62627]: DEBUG oslo_concurrency.lockutils [req-68bfab93-e2e5-42e0-b2c8-b68832ed6c48 req-9a688e62-4236-4f78-b4ed-34643eaa97e2 service nova] Lock "a674db15-ea4e-44c8-abc1-6af425286517-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 728.101459] env[62627]: DEBUG nova.compute.manager [req-68bfab93-e2e5-42e0-b2c8-b68832ed6c48 req-9a688e62-4236-4f78-b4ed-34643eaa97e2 service nova] [instance: a674db15-ea4e-44c8-abc1-6af425286517] No waiting events found dispatching network-vif-plugged-246fd9bd-81de-4162-b744-58c17169a80f {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 728.101932] env[62627]: WARNING nova.compute.manager [req-68bfab93-e2e5-42e0-b2c8-b68832ed6c48 req-9a688e62-4236-4f78-b4ed-34643eaa97e2 service nova] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Received unexpected event network-vif-plugged-246fd9bd-81de-4162-b744-58c17169a80f for instance with vm_state building and task_state spawning. 
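The interleaved "Waiting for the task" / "_poll_task ... progress is N%" / "completed successfully" entries above and below all follow one pattern: the driver submits a vCenter task (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task, ...) and then polls its state until it reports success or error. The following is a minimal, self-contained sketch of that polling pattern, not the oslo.vmware implementation; the poll_fn callable, the returned dict shape, and the fixed poll interval are illustrative assumptions.

import time

def wait_for_task(poll_fn, poll_interval=1.0, timeout=600):
    # poll_fn() is assumed to return a dict like {'state': 'running', 'progress': 77};
    # states follow the vSphere TaskInfo convention: queued / running / success / error.
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = poll_fn()
        if info['state'] == 'success':
            return info
        if info['state'] == 'error':
            raise RuntimeError("task failed: %s" % info.get('error'))
        # queued or running: report progress and poll again, like the
        # "progress is N%" DEBUG lines in the log.
        print("progress is %s%%" % info.get('progress', 0))
        time.sleep(poll_interval)
    raise TimeoutError("task did not complete within %ss" % timeout)

# Usage sketch with a fake poller that "finishes" on the third call.
if __name__ == "__main__":
    states = iter([{'state': 'running', 'progress': 0},
                   {'state': 'running', 'progress': 77},
                   {'state': 'success', 'progress': 100}])
    print(wait_for_task(lambda: next(states), poll_interval=0.01))

In the log, the same loop is what produces a partial-progress line (for example task-2193928 at 77%) on one poll and the "completed successfully ... duration_secs" line on a later poll, after which the driver moves on to the next step of the spawn sequence (extend disk, reconfigure VM, rename, power on).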
[ 728.102410] env[62627]: DEBUG nova.compute.manager [req-68bfab93-e2e5-42e0-b2c8-b68832ed6c48 req-9a688e62-4236-4f78-b4ed-34643eaa97e2 service nova] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Received event network-changed-246fd9bd-81de-4162-b744-58c17169a80f {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 728.103876] env[62627]: DEBUG nova.compute.manager [req-68bfab93-e2e5-42e0-b2c8-b68832ed6c48 req-9a688e62-4236-4f78-b4ed-34643eaa97e2 service nova] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Refreshing instance network info cache due to event network-changed-246fd9bd-81de-4162-b744-58c17169a80f. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 728.104335] env[62627]: DEBUG oslo_concurrency.lockutils [req-68bfab93-e2e5-42e0-b2c8-b68832ed6c48 req-9a688e62-4236-4f78-b4ed-34643eaa97e2 service nova] Acquiring lock "refresh_cache-a674db15-ea4e-44c8-abc1-6af425286517" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 728.104588] env[62627]: DEBUG oslo_concurrency.lockutils [req-68bfab93-e2e5-42e0-b2c8-b68832ed6c48 req-9a688e62-4236-4f78-b4ed-34643eaa97e2 service nova] Acquired lock "refresh_cache-a674db15-ea4e-44c8-abc1-6af425286517" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 728.105121] env[62627]: DEBUG nova.network.neutron [req-68bfab93-e2e5-42e0-b2c8-b68832ed6c48 req-9a688e62-4236-4f78-b4ed-34643eaa97e2 service nova] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Refreshing network info cache for port 246fd9bd-81de-4162-b744-58c17169a80f {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 728.247580] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.512s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 728.247812] env[62627]: DEBUG nova.compute.manager [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: c1fd6afb-9332-446d-a441-5942b60edc94] Start building networks asynchronously for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 728.251230] env[62627]: DEBUG oslo_concurrency.lockutils [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.135s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 728.252870] env[62627]: INFO nova.compute.claims [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: 77c60059-535a-47ca-a5b0-45cc1df93b45] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 728.310253] env[62627]: DEBUG oslo_vmware.api [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Task: {'id': task-2193934, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.337234] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ea905dee-4f2e-4aa9-8b62-5d5d5c044bea tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Lock "5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.754s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 728.356591] env[62627]: DEBUG oslo_vmware.api [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Task: {'id': task-2193936, 'name': PowerOnVM_Task} progress is 71%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.386882] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2193937, 'name': CreateVM_Task} progress is 25%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.413680] env[62627]: DEBUG oslo_concurrency.lockutils [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Acquiring lock "7691c2e2-4e43-4990-bf4d-2ef3298a4264" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 728.414297] env[62627]: DEBUG oslo_concurrency.lockutils [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Lock "7691c2e2-4e43-4990-bf4d-2ef3298a4264" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 728.487869] env[62627]: DEBUG oslo_vmware.api [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Task: {'id': task-2193931, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.559306] env[62627]: DEBUG oslo_vmware.api [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Task: {'id': task-2193938, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.757518] env[62627]: DEBUG nova.compute.utils [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 728.763378] env[62627]: DEBUG nova.compute.manager [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: c1fd6afb-9332-446d-a441-5942b60edc94] Allocating IP information in the background. {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 728.763378] env[62627]: DEBUG nova.network.neutron [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: c1fd6afb-9332-446d-a441-5942b60edc94] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 728.809246] env[62627]: DEBUG oslo_vmware.api [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Task: {'id': task-2193934, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.575705} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.809522] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] a674db15-ea4e-44c8-abc1-6af425286517/a674db15-ea4e-44c8-abc1-6af425286517.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 728.810035] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 728.810128] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-51416fc4-282d-4d3c-be0a-d0e1ac4c7879 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.819551] env[62627]: DEBUG oslo_vmware.api [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Waiting for the task: (returnval){ [ 728.819551] env[62627]: value = "task-2193940" [ 728.819551] env[62627]: _type = "Task" [ 728.819551] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.832053] env[62627]: DEBUG oslo_vmware.api [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Task: {'id': task-2193940, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.840980] env[62627]: DEBUG nova.compute.manager [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 33bf3932-3746-445b-b0fe-611466cdd9b1] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 728.858206] env[62627]: DEBUG oslo_vmware.api [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Task: {'id': task-2193936, 'name': PowerOnVM_Task, 'duration_secs': 0.784735} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.858722] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: 77418dff-6bdc-4e0a-954a-45c80960dbf1] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 728.858946] env[62627]: INFO nova.compute.manager [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: 77418dff-6bdc-4e0a-954a-45c80960dbf1] Took 13.70 seconds to spawn the instance on the hypervisor. [ 728.859874] env[62627]: DEBUG nova.compute.manager [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: 77418dff-6bdc-4e0a-954a-45c80960dbf1] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 728.860311] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fb6b7bb-e3b8-45c7-9896-98bb086c9a2c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.896690] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2193937, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.937850] env[62627]: DEBUG nova.policy [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c72c1ac25b9f4aeea2c5fb37afd37598', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '14238d8125674b63a7cf4452cb260283', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 728.991095] env[62627]: DEBUG oslo_vmware.api [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Task: {'id': task-2193931, 'name': ReconfigVM_Task, 'duration_secs': 1.415944} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.993678] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] [instance: f15982c3-23ed-4f25-bf55-3418cac117cc] Reconfigured VM instance instance-00000003 to attach disk [datastore2] f15982c3-23ed-4f25-bf55-3418cac117cc/f15982c3-23ed-4f25-bf55-3418cac117cc.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 728.993678] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-14ad8c5c-3360-4b01-aff2-d2c2f0ef9e8b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.002942] env[62627]: DEBUG oslo_vmware.api [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Waiting for the task: (returnval){ [ 729.002942] env[62627]: value = "task-2193941" [ 729.002942] env[62627]: _type = "Task" [ 729.002942] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.014919] env[62627]: DEBUG oslo_vmware.api [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Task: {'id': task-2193941, 'name': Rename_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.060530] env[62627]: DEBUG oslo_vmware.api [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Task: {'id': task-2193938, 'name': ReconfigVM_Task, 'duration_secs': 0.625857} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.060924] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] [instance: 1298559d-14fb-422c-8206-a35a747f0c06] Reconfigured VM instance instance-00000006 to attach disk [datastore2] 1298559d-14fb-422c-8206-a35a747f0c06/1298559d-14fb-422c-8206-a35a747f0c06.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 729.061598] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-66b9e729-2009-4d66-a90c-69a8c072f5b1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.070337] env[62627]: DEBUG oslo_vmware.api [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Waiting for the task: (returnval){ [ 729.070337] env[62627]: value = "task-2193942" [ 729.070337] env[62627]: _type = "Task" [ 729.070337] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.080144] env[62627]: DEBUG oslo_vmware.api [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Task: {'id': task-2193942, 'name': Rename_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.107144] env[62627]: DEBUG nova.compute.manager [req-fbb8a94c-e590-42e1-aef2-afefb46de569 req-ad114856-649b-4a29-93e9-d76a6b3aa07f service nova] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Received event network-vif-plugged-a7d230b4-8ae5-4c90-8ccf-806a4056e883 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 729.107144] env[62627]: DEBUG oslo_concurrency.lockutils [req-fbb8a94c-e590-42e1-aef2-afefb46de569 req-ad114856-649b-4a29-93e9-d76a6b3aa07f service nova] Acquiring lock "ab999825-4310-4fd8-81f0-6bb5dde4175b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 729.107144] env[62627]: DEBUG oslo_concurrency.lockutils [req-fbb8a94c-e590-42e1-aef2-afefb46de569 req-ad114856-649b-4a29-93e9-d76a6b3aa07f service nova] Lock "ab999825-4310-4fd8-81f0-6bb5dde4175b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 729.107459] env[62627]: DEBUG oslo_concurrency.lockutils [req-fbb8a94c-e590-42e1-aef2-afefb46de569 req-ad114856-649b-4a29-93e9-d76a6b3aa07f service nova] Lock "ab999825-4310-4fd8-81f0-6bb5dde4175b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 729.107459] env[62627]: DEBUG nova.compute.manager [req-fbb8a94c-e590-42e1-aef2-afefb46de569 req-ad114856-649b-4a29-93e9-d76a6b3aa07f service nova] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] No waiting events found dispatching 
network-vif-plugged-a7d230b4-8ae5-4c90-8ccf-806a4056e883 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 729.107670] env[62627]: WARNING nova.compute.manager [req-fbb8a94c-e590-42e1-aef2-afefb46de569 req-ad114856-649b-4a29-93e9-d76a6b3aa07f service nova] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Received unexpected event network-vif-plugged-a7d230b4-8ae5-4c90-8ccf-806a4056e883 for instance with vm_state building and task_state spawning. [ 729.107869] env[62627]: DEBUG nova.compute.manager [req-fbb8a94c-e590-42e1-aef2-afefb46de569 req-ad114856-649b-4a29-93e9-d76a6b3aa07f service nova] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Received event network-changed-a7d230b4-8ae5-4c90-8ccf-806a4056e883 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 729.108080] env[62627]: DEBUG nova.compute.manager [req-fbb8a94c-e590-42e1-aef2-afefb46de569 req-ad114856-649b-4a29-93e9-d76a6b3aa07f service nova] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Refreshing instance network info cache due to event network-changed-a7d230b4-8ae5-4c90-8ccf-806a4056e883. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 729.108342] env[62627]: DEBUG oslo_concurrency.lockutils [req-fbb8a94c-e590-42e1-aef2-afefb46de569 req-ad114856-649b-4a29-93e9-d76a6b3aa07f service nova] Acquiring lock "refresh_cache-ab999825-4310-4fd8-81f0-6bb5dde4175b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 729.108515] env[62627]: DEBUG oslo_concurrency.lockutils [req-fbb8a94c-e590-42e1-aef2-afefb46de569 req-ad114856-649b-4a29-93e9-d76a6b3aa07f service nova] Acquired lock "refresh_cache-ab999825-4310-4fd8-81f0-6bb5dde4175b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 729.108715] env[62627]: DEBUG nova.network.neutron [req-fbb8a94c-e590-42e1-aef2-afefb46de569 req-ad114856-649b-4a29-93e9-d76a6b3aa07f service nova] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Refreshing network info cache for port a7d230b4-8ae5-4c90-8ccf-806a4056e883 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 729.268596] env[62627]: DEBUG nova.compute.manager [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: c1fd6afb-9332-446d-a441-5942b60edc94] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 729.340143] env[62627]: DEBUG oslo_vmware.api [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Task: {'id': task-2193940, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.084546} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.340773] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 729.341991] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca722ce9-ed93-48fc-af92-96b3ce523e65 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.379340] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Reconfiguring VM instance instance-00000004 to attach disk [datastore2] a674db15-ea4e-44c8-abc1-6af425286517/a674db15-ea4e-44c8-abc1-6af425286517.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 729.383780] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 729.389183] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2ccfc6df-3272-409a-a4d5-fd1f795b2f2e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.412053] env[62627]: INFO nova.compute.manager [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: 77418dff-6bdc-4e0a-954a-45c80960dbf1] Took 19.90 seconds to build instance. [ 729.425538] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2193937, 'name': CreateVM_Task, 'duration_secs': 1.321152} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.429190] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 729.429564] env[62627]: DEBUG oslo_vmware.api [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Waiting for the task: (returnval){ [ 729.429564] env[62627]: value = "task-2193943" [ 729.429564] env[62627]: _type = "Task" [ 729.429564] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.430431] env[62627]: DEBUG oslo_concurrency.lockutils [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 729.430628] env[62627]: DEBUG oslo_concurrency.lockutils [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 729.431072] env[62627]: DEBUG oslo_concurrency.lockutils [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 729.431845] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b3cc941b-9320-4ce0-ac1f-f12a58337e6d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.448386] env[62627]: DEBUG oslo_vmware.api [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Waiting for the task: (returnval){ [ 729.448386] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52efdc5d-64a7-e3d5-3f89-715e4b9c7a0c" [ 729.448386] env[62627]: _type = "Task" [ 729.448386] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.466983] env[62627]: DEBUG oslo_vmware.api [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52efdc5d-64a7-e3d5-3f89-715e4b9c7a0c, 'name': SearchDatastore_Task, 'duration_secs': 0.016893} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.466983] env[62627]: DEBUG oslo_concurrency.lockutils [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 729.467248] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 729.467595] env[62627]: DEBUG oslo_concurrency.lockutils [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 729.467834] env[62627]: DEBUG oslo_concurrency.lockutils [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 729.468142] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 729.468458] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f6b8f7a3-2499-4211-84d8-3a2ce5763cb2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.474907] env[62627]: DEBUG nova.network.neutron [req-68bfab93-e2e5-42e0-b2c8-b68832ed6c48 req-9a688e62-4236-4f78-b4ed-34643eaa97e2 service nova] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Updated VIF entry in instance network info cache for port 246fd9bd-81de-4162-b744-58c17169a80f. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 729.475194] env[62627]: DEBUG nova.network.neutron [req-68bfab93-e2e5-42e0-b2c8-b68832ed6c48 req-9a688e62-4236-4f78-b4ed-34643eaa97e2 service nova] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Updating instance_info_cache with network_info: [{"id": "246fd9bd-81de-4162-b744-58c17169a80f", "address": "fa:16:3e:9e:4a:5e", "network": {"id": "5d65934a-2966-4094-bc55-817726caaea5", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1246247596-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b12d6c9026644c9cb45d687b92161121", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap246fd9bd-81", "ovs_interfaceid": "246fd9bd-81de-4162-b744-58c17169a80f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 729.484037] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 729.484037] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 729.484037] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-77fc8da9-826f-496a-8dc1-65d5dcc640d7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.489988] env[62627]: DEBUG oslo_vmware.api [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Waiting for the task: (returnval){ [ 729.489988] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52a4eedb-bf55-03d3-5580-39e133d01a43" [ 729.489988] env[62627]: _type = "Task" [ 729.489988] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.500915] env[62627]: DEBUG oslo_vmware.api [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52a4eedb-bf55-03d3-5580-39e133d01a43, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.514877] env[62627]: DEBUG oslo_vmware.api [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Task: {'id': task-2193941, 'name': Rename_Task, 'duration_secs': 0.222167} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.514877] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] [instance: f15982c3-23ed-4f25-bf55-3418cac117cc] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 729.514877] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2992be31-95c1-40a3-95cd-71766066c4a4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.525578] env[62627]: DEBUG oslo_vmware.api [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Waiting for the task: (returnval){ [ 729.525578] env[62627]: value = "task-2193944" [ 729.525578] env[62627]: _type = "Task" [ 729.525578] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.536526] env[62627]: DEBUG oslo_vmware.api [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Task: {'id': task-2193944, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.582942] env[62627]: DEBUG oslo_vmware.api [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Task: {'id': task-2193942, 'name': Rename_Task, 'duration_secs': 0.32472} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.583163] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] [instance: 1298559d-14fb-422c-8206-a35a747f0c06] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 729.584350] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-61055206-d6b6-4ee9-a944-28c277ab778f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.593861] env[62627]: DEBUG oslo_vmware.api [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Waiting for the task: (returnval){ [ 729.593861] env[62627]: value = "task-2193945" [ 729.593861] env[62627]: _type = "Task" [ 729.593861] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.600817] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e018713d-55ef-46d2-a634-77a92eec1062 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.608707] env[62627]: DEBUG oslo_vmware.api [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Task: {'id': task-2193945, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.613956] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0584afa4-3790-4e58-80e5-1762b5804a50 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.649308] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6d0a240-7935-4eba-86d3-3a435ca0323b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.658202] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3eb09f0-39ea-4543-9f68-65f6e0ab7a9d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.675207] env[62627]: DEBUG nova.compute.provider_tree [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 729.914637] env[62627]: DEBUG oslo_concurrency.lockutils [None req-79d9156d-fb45-40fb-9316-ac3a359e1d1c tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Lock "77418dff-6bdc-4e0a-954a-45c80960dbf1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.418s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 729.945887] env[62627]: DEBUG oslo_vmware.api [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Task: {'id': task-2193943, 'name': ReconfigVM_Task, 'duration_secs': 0.417026} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.946190] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Reconfigured VM instance instance-00000004 to attach disk [datastore2] a674db15-ea4e-44c8-abc1-6af425286517/a674db15-ea4e-44c8-abc1-6af425286517.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 729.946887] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0a59fa48-3713-4092-b4cf-82b8087b3a25 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.956029] env[62627]: DEBUG oslo_vmware.api [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Waiting for the task: (returnval){ [ 729.956029] env[62627]: value = "task-2193946" [ 729.956029] env[62627]: _type = "Task" [ 729.956029] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.969697] env[62627]: DEBUG oslo_vmware.api [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Task: {'id': task-2193946, 'name': Rename_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.977806] env[62627]: DEBUG oslo_concurrency.lockutils [req-68bfab93-e2e5-42e0-b2c8-b68832ed6c48 req-9a688e62-4236-4f78-b4ed-34643eaa97e2 service nova] Releasing lock "refresh_cache-a674db15-ea4e-44c8-abc1-6af425286517" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 730.010447] env[62627]: DEBUG oslo_vmware.api [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52a4eedb-bf55-03d3-5580-39e133d01a43, 'name': SearchDatastore_Task, 'duration_secs': 0.014166} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.011649] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f5db880-e840-4ac5-a9b8-311b38d99820 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.018953] env[62627]: DEBUG oslo_vmware.api [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Waiting for the task: (returnval){ [ 730.018953] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5202d188-bc08-c0a5-06ba-d2c3a85fc014" [ 730.018953] env[62627]: _type = "Task" [ 730.018953] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.029071] env[62627]: DEBUG oslo_vmware.api [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5202d188-bc08-c0a5-06ba-d2c3a85fc014, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.041484] env[62627]: DEBUG oslo_vmware.api [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Task: {'id': task-2193944, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.108703] env[62627]: DEBUG oslo_vmware.api [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Task: {'id': task-2193945, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.178935] env[62627]: DEBUG nova.scheduler.client.report [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 730.283386] env[62627]: DEBUG nova.compute.manager [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: c1fd6afb-9332-446d-a441-5942b60edc94] Start spawning the instance on the hypervisor. 
{{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 730.317927] env[62627]: DEBUG nova.virt.hardware [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 730.318887] env[62627]: DEBUG nova.virt.hardware [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 730.318887] env[62627]: DEBUG nova.virt.hardware [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 730.318887] env[62627]: DEBUG nova.virt.hardware [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 730.318887] env[62627]: DEBUG nova.virt.hardware [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 730.319238] env[62627]: DEBUG nova.virt.hardware [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 730.319238] env[62627]: DEBUG nova.virt.hardware [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 730.319378] env[62627]: DEBUG nova.virt.hardware [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 730.319612] env[62627]: DEBUG nova.virt.hardware [None req-3ad0588c-0d6d-4bff-be10-b325220b688a 
tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 730.319763] env[62627]: DEBUG nova.virt.hardware [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 730.320016] env[62627]: DEBUG nova.virt.hardware [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 730.321393] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5cef350-c8a4-4e62-b732-490ef238c924 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.332167] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb70a250-7e1c-4bf9-bfca-1235d1ad0926 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.417859] env[62627]: DEBUG nova.compute.manager [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: e8c87637-4a30-4482-af21-f58ad8a218fc] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 730.471958] env[62627]: DEBUG oslo_vmware.api [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Task: {'id': task-2193946, 'name': Rename_Task, 'duration_secs': 0.212852} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.472712] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 730.472993] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b44c290f-2811-4506-90a0-010ee830e070 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.481180] env[62627]: DEBUG oslo_vmware.api [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Waiting for the task: (returnval){ [ 730.481180] env[62627]: value = "task-2193948" [ 730.481180] env[62627]: _type = "Task" [ 730.481180] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.491919] env[62627]: DEBUG oslo_vmware.api [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Task: {'id': task-2193948, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.499274] env[62627]: DEBUG nova.network.neutron [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: c1fd6afb-9332-446d-a441-5942b60edc94] Successfully created port: fee6b373-fb16-4c75-8b2d-d4df6e7e1952 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 730.531610] env[62627]: DEBUG oslo_vmware.api [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5202d188-bc08-c0a5-06ba-d2c3a85fc014, 'name': SearchDatastore_Task, 'duration_secs': 0.014451} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.535346] env[62627]: DEBUG oslo_concurrency.lockutils [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 730.535620] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] ab999825-4310-4fd8-81f0-6bb5dde4175b/ab999825-4310-4fd8-81f0-6bb5dde4175b.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 730.536208] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3e56d948-681a-4a3a-adf9-7eafca44a619 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.544594] env[62627]: DEBUG oslo_vmware.api [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Task: {'id': task-2193944, 'name': PowerOnVM_Task, 'duration_secs': 0.683225} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.546046] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] [instance: f15982c3-23ed-4f25-bf55-3418cac117cc] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 730.546262] env[62627]: INFO nova.compute.manager [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] [instance: f15982c3-23ed-4f25-bf55-3418cac117cc] Took 13.13 seconds to spawn the instance on the hypervisor. [ 730.546440] env[62627]: DEBUG nova.compute.manager [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] [instance: f15982c3-23ed-4f25-bf55-3418cac117cc] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 730.546936] env[62627]: DEBUG oslo_vmware.api [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Waiting for the task: (returnval){ [ 730.546936] env[62627]: value = "task-2193949" [ 730.546936] env[62627]: _type = "Task" [ 730.546936] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.547651] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c7f938f-311f-4be6-9e88-d66b887d48d0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.560297] env[62627]: DEBUG oslo_vmware.api [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2193949, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.607157] env[62627]: DEBUG oslo_vmware.api [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Task: {'id': task-2193945, 'name': PowerOnVM_Task, 'duration_secs': 0.68513} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.608173] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] [instance: 1298559d-14fb-422c-8206-a35a747f0c06] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 730.609187] env[62627]: INFO nova.compute.manager [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] [instance: 1298559d-14fb-422c-8206-a35a747f0c06] Took 6.20 seconds to spawn the instance on the hypervisor. 
[ 730.609404] env[62627]: DEBUG nova.compute.manager [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] [instance: 1298559d-14fb-422c-8206-a35a747f0c06] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 730.610439] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97b61408-0fde-4885-b13f-674cc9bdd8a3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.615240] env[62627]: DEBUG nova.network.neutron [req-fbb8a94c-e590-42e1-aef2-afefb46de569 req-ad114856-649b-4a29-93e9-d76a6b3aa07f service nova] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Updated VIF entry in instance network info cache for port a7d230b4-8ae5-4c90-8ccf-806a4056e883. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 730.615571] env[62627]: DEBUG nova.network.neutron [req-fbb8a94c-e590-42e1-aef2-afefb46de569 req-ad114856-649b-4a29-93e9-d76a6b3aa07f service nova] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Updating instance_info_cache with network_info: [{"id": "a7d230b4-8ae5-4c90-8ccf-806a4056e883", "address": "fa:16:3e:4c:84:35", "network": {"id": "bd876f46-2bdf-4196-b718-c36f7e15f150", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.153", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ccb0cafa1a904d578ec4998b386bebf7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae1d6a8-cbba-4bbf-af10-ba5467340475", "external-id": "nsx-vlan-transportzone-271", "segmentation_id": 271, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7d230b4-8a", "ovs_interfaceid": "a7d230b4-8ae5-4c90-8ccf-806a4056e883", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 730.658010] env[62627]: DEBUG nova.network.neutron [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Successfully updated port: 81a799c8-db8c-4bae-8609-d4bf5b56c600 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 730.685831] env[62627]: DEBUG oslo_concurrency.lockutils [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.435s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 730.686868] env[62627]: DEBUG nova.compute.manager [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: 77c60059-535a-47ca-a5b0-45cc1df93b45] Start 
building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 730.689577] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 8.184s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 730.804035] env[62627]: DEBUG oslo_concurrency.lockutils [None req-096fe3c8-8fc9-44cf-8029-4ac58f2116c8 tempest-DeleteServersAdminTestJSON-184387882 tempest-DeleteServersAdminTestJSON-184387882-project-admin] Acquiring lock "5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 730.804035] env[62627]: DEBUG oslo_concurrency.lockutils [None req-096fe3c8-8fc9-44cf-8029-4ac58f2116c8 tempest-DeleteServersAdminTestJSON-184387882 tempest-DeleteServersAdminTestJSON-184387882-project-admin] Lock "5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 730.804035] env[62627]: DEBUG oslo_concurrency.lockutils [None req-096fe3c8-8fc9-44cf-8029-4ac58f2116c8 tempest-DeleteServersAdminTestJSON-184387882 tempest-DeleteServersAdminTestJSON-184387882-project-admin] Acquiring lock "5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 730.804035] env[62627]: DEBUG oslo_concurrency.lockutils [None req-096fe3c8-8fc9-44cf-8029-4ac58f2116c8 tempest-DeleteServersAdminTestJSON-184387882 tempest-DeleteServersAdminTestJSON-184387882-project-admin] Lock "5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 730.804464] env[62627]: DEBUG oslo_concurrency.lockutils [None req-096fe3c8-8fc9-44cf-8029-4ac58f2116c8 tempest-DeleteServersAdminTestJSON-184387882 tempest-DeleteServersAdminTestJSON-184387882-project-admin] Lock "5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 730.806704] env[62627]: INFO nova.compute.manager [None req-096fe3c8-8fc9-44cf-8029-4ac58f2116c8 tempest-DeleteServersAdminTestJSON-184387882 tempest-DeleteServersAdminTestJSON-184387882-project-admin] [instance: 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0] Terminating instance [ 730.961574] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 731.005203] env[62627]: DEBUG oslo_vmware.api [None 
req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Task: {'id': task-2193948, 'name': PowerOnVM_Task} progress is 1%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.064831] env[62627]: DEBUG oslo_vmware.api [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2193949, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.080609] env[62627]: INFO nova.compute.manager [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] [instance: f15982c3-23ed-4f25-bf55-3418cac117cc] Took 21.11 seconds to build instance. [ 731.124071] env[62627]: DEBUG oslo_concurrency.lockutils [req-fbb8a94c-e590-42e1-aef2-afefb46de569 req-ad114856-649b-4a29-93e9-d76a6b3aa07f service nova] Releasing lock "refresh_cache-ab999825-4310-4fd8-81f0-6bb5dde4175b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 731.132132] env[62627]: INFO nova.compute.manager [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] [instance: 1298559d-14fb-422c-8206-a35a747f0c06] Took 19.14 seconds to build instance. [ 731.160742] env[62627]: DEBUG oslo_concurrency.lockutils [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Acquiring lock "refresh_cache-7366e4b0-bdba-435d-844d-8e81c4464b31" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 731.161871] env[62627]: DEBUG oslo_concurrency.lockutils [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Acquired lock "refresh_cache-7366e4b0-bdba-435d-844d-8e81c4464b31" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 731.161871] env[62627]: DEBUG nova.network.neutron [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 731.198440] env[62627]: DEBUG nova.compute.utils [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 731.201927] env[62627]: DEBUG nova.compute.manager [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: 77c60059-535a-47ca-a5b0-45cc1df93b45] Allocating IP information in the background. 
{{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 731.202493] env[62627]: DEBUG nova.network.neutron [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: 77c60059-535a-47ca-a5b0-45cc1df93b45] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 731.312314] env[62627]: DEBUG nova.compute.manager [None req-096fe3c8-8fc9-44cf-8029-4ac58f2116c8 tempest-DeleteServersAdminTestJSON-184387882 tempest-DeleteServersAdminTestJSON-184387882-project-admin] [instance: 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0] Start destroying the instance on the hypervisor. {{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 731.312533] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-096fe3c8-8fc9-44cf-8029-4ac58f2116c8 tempest-DeleteServersAdminTestJSON-184387882 tempest-DeleteServersAdminTestJSON-184387882-project-admin] [instance: 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 731.313448] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77e91e16-17f9-4333-9606-7f9e5b978fea {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.324449] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-096fe3c8-8fc9-44cf-8029-4ac58f2116c8 tempest-DeleteServersAdminTestJSON-184387882 tempest-DeleteServersAdminTestJSON-184387882-project-admin] [instance: 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 731.324715] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ca7aca4d-8c9b-4932-a556-c61688d3bbf7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.329540] env[62627]: DEBUG nova.policy [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '27f56b55b5da498a94fe889d63538d53', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ca351451d2344fc1872f6faf89412354', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 731.333587] env[62627]: DEBUG oslo_vmware.api [None req-096fe3c8-8fc9-44cf-8029-4ac58f2116c8 tempest-DeleteServersAdminTestJSON-184387882 tempest-DeleteServersAdminTestJSON-184387882-project-admin] Waiting for the task: (returnval){ [ 731.333587] env[62627]: value = "task-2193950" [ 731.333587] env[62627]: _type = "Task" [ 731.333587] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.343342] env[62627]: DEBUG oslo_vmware.api [None req-096fe3c8-8fc9-44cf-8029-4ac58f2116c8 tempest-DeleteServersAdminTestJSON-184387882 tempest-DeleteServersAdminTestJSON-184387882-project-admin] Task: {'id': task-2193950, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.498467] env[62627]: DEBUG oslo_vmware.api [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Task: {'id': task-2193948, 'name': PowerOnVM_Task} progress is 64%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.567030] env[62627]: DEBUG oslo_vmware.api [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2193949, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.62874} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.569104] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] ab999825-4310-4fd8-81f0-6bb5dde4175b/ab999825-4310-4fd8-81f0-6bb5dde4175b.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 731.569104] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 731.569104] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-87dad122-7c9c-4f02-8509-8fdf8d51a184 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.583046] env[62627]: DEBUG oslo_vmware.api [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Waiting for the task: (returnval){ [ 731.583046] env[62627]: value = "task-2193951" [ 731.583046] env[62627]: _type = "Task" [ 731.583046] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.583046] env[62627]: DEBUG oslo_concurrency.lockutils [None req-819b612f-52bd-4499-b82b-186d49c18c06 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Lock "f15982c3-23ed-4f25-bf55-3418cac117cc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.622s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 731.600041] env[62627]: DEBUG oslo_vmware.api [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2193951, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.634445] env[62627]: DEBUG oslo_concurrency.lockutils [None req-70a411e5-fe7f-4e01-9a72-d58308d38863 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Lock "1298559d-14fb-422c-8206-a35a747f0c06" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.653s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 731.709409] env[62627]: DEBUG nova.compute.manager [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: 77c60059-535a-47ca-a5b0-45cc1df93b45] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 731.728602] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 731.728602] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 77418dff-6bdc-4e0a-954a-45c80960dbf1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 731.728602] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance f15982c3-23ed-4f25-bf55-3418cac117cc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 731.728602] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance a674db15-ea4e-44c8-abc1-6af425286517 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 731.728812] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance ab999825-4310-4fd8-81f0-6bb5dde4175b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 731.728812] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 1298559d-14fb-422c-8206-a35a747f0c06 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 731.728812] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 7366e4b0-bdba-435d-844d-8e81c4464b31 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 731.728812] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance c1fd6afb-9332-446d-a441-5942b60edc94 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 731.729000] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 77c60059-535a-47ca-a5b0-45cc1df93b45 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 731.847857] env[62627]: DEBUG oslo_vmware.api [None req-096fe3c8-8fc9-44cf-8029-4ac58f2116c8 tempest-DeleteServersAdminTestJSON-184387882 tempest-DeleteServersAdminTestJSON-184387882-project-admin] Task: {'id': task-2193950, 'name': PowerOffVM_Task, 'duration_secs': 0.220736} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.847857] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-096fe3c8-8fc9-44cf-8029-4ac58f2116c8 tempest-DeleteServersAdminTestJSON-184387882 tempest-DeleteServersAdminTestJSON-184387882-project-admin] [instance: 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 731.847857] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-096fe3c8-8fc9-44cf-8029-4ac58f2116c8 tempest-DeleteServersAdminTestJSON-184387882 tempest-DeleteServersAdminTestJSON-184387882-project-admin] [instance: 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 731.847857] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fad17ef0-f394-4f2e-9b9a-4141aecc0799 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.928413] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-096fe3c8-8fc9-44cf-8029-4ac58f2116c8 tempest-DeleteServersAdminTestJSON-184387882 tempest-DeleteServersAdminTestJSON-184387882-project-admin] [instance: 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 731.929105] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-096fe3c8-8fc9-44cf-8029-4ac58f2116c8 tempest-DeleteServersAdminTestJSON-184387882 tempest-DeleteServersAdminTestJSON-184387882-project-admin] [instance: 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 731.931251] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-096fe3c8-8fc9-44cf-8029-4ac58f2116c8 tempest-DeleteServersAdminTestJSON-184387882 tempest-DeleteServersAdminTestJSON-184387882-project-admin] Deleting the datastore file [datastore2] 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 731.931251] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-50fff15d-5f04-4b5e-bb01-af13bf382e3b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.940165] env[62627]: DEBUG oslo_vmware.api [None req-096fe3c8-8fc9-44cf-8029-4ac58f2116c8 tempest-DeleteServersAdminTestJSON-184387882 tempest-DeleteServersAdminTestJSON-184387882-project-admin] Waiting for the task: (returnval){ [ 731.940165] env[62627]: value = "task-2193953" [ 731.940165] env[62627]: _type = "Task" [ 731.940165] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.949746] env[62627]: DEBUG oslo_vmware.api [None req-096fe3c8-8fc9-44cf-8029-4ac58f2116c8 tempest-DeleteServersAdminTestJSON-184387882 tempest-DeleteServersAdminTestJSON-184387882-project-admin] Task: {'id': task-2193953, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.965680] env[62627]: DEBUG nova.network.neutron [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 731.997240] env[62627]: DEBUG oslo_vmware.api [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Task: {'id': task-2193948, 'name': PowerOnVM_Task} progress is 91%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.099114] env[62627]: DEBUG nova.compute.manager [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 732.101631] env[62627]: DEBUG oslo_vmware.api [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2193951, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080551} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.102682] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 732.103615] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-036783b2-cada-4a3d-94af-29b25ad9a310 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.134879] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Reconfiguring VM instance instance-00000005 to attach disk [datastore2] ab999825-4310-4fd8-81f0-6bb5dde4175b/ab999825-4310-4fd8-81f0-6bb5dde4175b.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 732.142524] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-47c96fa0-69b4-4d1e-985a-03331bb56cf1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.157418] env[62627]: DEBUG nova.compute.manager [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] [instance: 7691c2e2-4e43-4990-bf4d-2ef3298a4264] Starting instance... 
{{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 732.169806] env[62627]: DEBUG oslo_vmware.api [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Waiting for the task: (returnval){ [ 732.169806] env[62627]: value = "task-2193954" [ 732.169806] env[62627]: _type = "Task" [ 732.169806] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.180957] env[62627]: DEBUG oslo_vmware.api [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2193954, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.231606] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance db8ddb7b-b119-415c-98d9-dc5b3d2868b7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 732.263162] env[62627]: DEBUG nova.compute.manager [req-24f7fc1e-ed90-4ef3-b819-6afa5aa068b6 req-dbe41b51-9f10-4a96-9baa-bcaceb7e43f7 service nova] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Received event network-vif-plugged-81a799c8-db8c-4bae-8609-d4bf5b56c600 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 732.263631] env[62627]: DEBUG oslo_concurrency.lockutils [req-24f7fc1e-ed90-4ef3-b819-6afa5aa068b6 req-dbe41b51-9f10-4a96-9baa-bcaceb7e43f7 service nova] Acquiring lock "7366e4b0-bdba-435d-844d-8e81c4464b31-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 732.264525] env[62627]: DEBUG oslo_concurrency.lockutils [req-24f7fc1e-ed90-4ef3-b819-6afa5aa068b6 req-dbe41b51-9f10-4a96-9baa-bcaceb7e43f7 service nova] Lock "7366e4b0-bdba-435d-844d-8e81c4464b31-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 732.264776] env[62627]: DEBUG oslo_concurrency.lockutils [req-24f7fc1e-ed90-4ef3-b819-6afa5aa068b6 req-dbe41b51-9f10-4a96-9baa-bcaceb7e43f7 service nova] Lock "7366e4b0-bdba-435d-844d-8e81c4464b31-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 732.264994] env[62627]: DEBUG nova.compute.manager [req-24f7fc1e-ed90-4ef3-b819-6afa5aa068b6 req-dbe41b51-9f10-4a96-9baa-bcaceb7e43f7 service nova] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] No waiting events found dispatching network-vif-plugged-81a799c8-db8c-4bae-8609-d4bf5b56c600 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 732.265232] env[62627]: WARNING nova.compute.manager [req-24f7fc1e-ed90-4ef3-b819-6afa5aa068b6 req-dbe41b51-9f10-4a96-9baa-bcaceb7e43f7 service nova] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] 
Received unexpected event network-vif-plugged-81a799c8-db8c-4bae-8609-d4bf5b56c600 for instance with vm_state building and task_state spawning. [ 732.265710] env[62627]: DEBUG nova.compute.manager [req-24f7fc1e-ed90-4ef3-b819-6afa5aa068b6 req-dbe41b51-9f10-4a96-9baa-bcaceb7e43f7 service nova] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Received event network-changed-81a799c8-db8c-4bae-8609-d4bf5b56c600 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 732.265710] env[62627]: DEBUG nova.compute.manager [req-24f7fc1e-ed90-4ef3-b819-6afa5aa068b6 req-dbe41b51-9f10-4a96-9baa-bcaceb7e43f7 service nova] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Refreshing instance network info cache due to event network-changed-81a799c8-db8c-4bae-8609-d4bf5b56c600. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 732.266424] env[62627]: DEBUG oslo_concurrency.lockutils [req-24f7fc1e-ed90-4ef3-b819-6afa5aa068b6 req-dbe41b51-9f10-4a96-9baa-bcaceb7e43f7 service nova] Acquiring lock "refresh_cache-7366e4b0-bdba-435d-844d-8e81c4464b31" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 732.350844] env[62627]: DEBUG nova.compute.manager [None req-b37ea6db-cd4e-4d02-aeaa-c616a7b4fd2c tempest-ServerDiagnosticsV248Test-915452748 tempest-ServerDiagnosticsV248Test-915452748-project-admin] [instance: 1298559d-14fb-422c-8206-a35a747f0c06] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 732.352131] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e21fcff-b249-4cab-a83f-7d81933ecb0b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.362161] env[62627]: INFO nova.compute.manager [None req-b37ea6db-cd4e-4d02-aeaa-c616a7b4fd2c tempest-ServerDiagnosticsV248Test-915452748 tempest-ServerDiagnosticsV248Test-915452748-project-admin] [instance: 1298559d-14fb-422c-8206-a35a747f0c06] Retrieving diagnostics [ 732.363015] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87455e76-7183-4c71-901d-49e333d84b24 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.455632] env[62627]: DEBUG oslo_vmware.api [None req-096fe3c8-8fc9-44cf-8029-4ac58f2116c8 tempest-DeleteServersAdminTestJSON-184387882 tempest-DeleteServersAdminTestJSON-184387882-project-admin] Task: {'id': task-2193953, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.496279] env[62627]: DEBUG oslo_vmware.api [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Task: {'id': task-2193948, 'name': PowerOnVM_Task, 'duration_secs': 1.54926} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.496279] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 732.496279] env[62627]: INFO nova.compute.manager [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Took 12.84 seconds to spawn the instance on the hypervisor. [ 732.496279] env[62627]: DEBUG nova.compute.manager [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 732.497460] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d8b85a6-8067-4008-8bf1-29d646068efd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.555139] env[62627]: DEBUG nova.network.neutron [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Updating instance_info_cache with network_info: [{"id": "81a799c8-db8c-4bae-8609-d4bf5b56c600", "address": "fa:16:3e:1f:8d:65", "network": {"id": "4e8b6953-adb2-4977-8315-179e01e807e0", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1952492535-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14238d8125674b63a7cf4452cb260283", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2907cce-d529-4809-af05-d29397bed211", "external-id": "nsx-vlan-transportzone-427", "segmentation_id": 427, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap81a799c8-db", "ovs_interfaceid": "81a799c8-db8c-4bae-8609-d4bf5b56c600", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 732.624766] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 732.683345] env[62627]: DEBUG oslo_vmware.api [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 
tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2193954, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.690997] env[62627]: DEBUG oslo_concurrency.lockutils [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 732.695340] env[62627]: DEBUG nova.network.neutron [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: 77c60059-535a-47ca-a5b0-45cc1df93b45] Successfully created port: af7ffe26-3161-4ff9-abf9-eb8480443269 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 732.717993] env[62627]: DEBUG nova.compute.manager [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: 77c60059-535a-47ca-a5b0-45cc1df93b45] Start spawning the instance on the hypervisor. {{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 732.733802] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 33bf3932-3746-445b-b0fe-611466cdd9b1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 732.747502] env[62627]: DEBUG nova.virt.hardware [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 732.747954] env[62627]: DEBUG nova.virt.hardware [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 732.748349] env[62627]: DEBUG nova.virt.hardware [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 732.748437] env[62627]: DEBUG nova.virt.hardware [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 732.748758] env[62627]: DEBUG nova.virt.hardware [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 732.748874] env[62627]: DEBUG nova.virt.hardware [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 732.749259] env[62627]: DEBUG nova.virt.hardware [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 732.749343] env[62627]: DEBUG nova.virt.hardware [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 732.749448] env[62627]: DEBUG 
nova.virt.hardware [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 732.749832] env[62627]: DEBUG nova.virt.hardware [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 732.750079] env[62627]: DEBUG nova.virt.hardware [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 732.751408] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0085a90-2b1e-414d-b341-9c702018f1b7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.762880] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1310bbdc-9e46-4666-9272-5a376b18e463 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.958982] env[62627]: DEBUG oslo_vmware.api [None req-096fe3c8-8fc9-44cf-8029-4ac58f2116c8 tempest-DeleteServersAdminTestJSON-184387882 tempest-DeleteServersAdminTestJSON-184387882-project-admin] Task: {'id': task-2193953, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.633376} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.959479] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-096fe3c8-8fc9-44cf-8029-4ac58f2116c8 tempest-DeleteServersAdminTestJSON-184387882 tempest-DeleteServersAdminTestJSON-184387882-project-admin] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 732.959910] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-096fe3c8-8fc9-44cf-8029-4ac58f2116c8 tempest-DeleteServersAdminTestJSON-184387882 tempest-DeleteServersAdminTestJSON-184387882-project-admin] [instance: 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 732.960238] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-096fe3c8-8fc9-44cf-8029-4ac58f2116c8 tempest-DeleteServersAdminTestJSON-184387882 tempest-DeleteServersAdminTestJSON-184387882-project-admin] [instance: 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 732.960547] env[62627]: INFO nova.compute.manager [None req-096fe3c8-8fc9-44cf-8029-4ac58f2116c8 tempest-DeleteServersAdminTestJSON-184387882 tempest-DeleteServersAdminTestJSON-184387882-project-admin] [instance: 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0] Took 1.65 seconds to destroy the instance on the hypervisor. 
[ 732.960932] env[62627]: DEBUG oslo.service.loopingcall [None req-096fe3c8-8fc9-44cf-8029-4ac58f2116c8 tempest-DeleteServersAdminTestJSON-184387882 tempest-DeleteServersAdminTestJSON-184387882-project-admin] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 732.961289] env[62627]: DEBUG nova.compute.manager [-] [instance: 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 732.961598] env[62627]: DEBUG nova.network.neutron [-] [instance: 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 733.026898] env[62627]: INFO nova.compute.manager [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Took 22.54 seconds to build instance. [ 733.059286] env[62627]: DEBUG oslo_concurrency.lockutils [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Releasing lock "refresh_cache-7366e4b0-bdba-435d-844d-8e81c4464b31" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 733.059364] env[62627]: DEBUG nova.compute.manager [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Instance network_info: |[{"id": "81a799c8-db8c-4bae-8609-d4bf5b56c600", "address": "fa:16:3e:1f:8d:65", "network": {"id": "4e8b6953-adb2-4977-8315-179e01e807e0", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1952492535-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14238d8125674b63a7cf4452cb260283", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2907cce-d529-4809-af05-d29397bed211", "external-id": "nsx-vlan-transportzone-427", "segmentation_id": 427, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap81a799c8-db", "ovs_interfaceid": "81a799c8-db8c-4bae-8609-d4bf5b56c600", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 733.059830] env[62627]: DEBUG oslo_concurrency.lockutils [req-24f7fc1e-ed90-4ef3-b819-6afa5aa068b6 req-dbe41b51-9f10-4a96-9baa-bcaceb7e43f7 service nova] Acquired lock "refresh_cache-7366e4b0-bdba-435d-844d-8e81c4464b31" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 733.059830] env[62627]: DEBUG nova.network.neutron [req-24f7fc1e-ed90-4ef3-b819-6afa5aa068b6 req-dbe41b51-9f10-4a96-9baa-bcaceb7e43f7 
service nova] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Refreshing network info cache for port 81a799c8-db8c-4bae-8609-d4bf5b56c600 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 733.061049] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1f:8d:65', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b2907cce-d529-4809-af05-d29397bed211', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '81a799c8-db8c-4bae-8609-d4bf5b56c600', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 733.076618] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Creating folder: Project (14238d8125674b63a7cf4452cb260283). Parent ref: group-v447541. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 733.080468] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-42f1fd1e-e111-4bac-a57e-7377999d34be {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.096756] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Created folder: Project (14238d8125674b63a7cf4452cb260283) in parent group-v447541. [ 733.096991] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Creating folder: Instances. Parent ref: group-v447564. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 733.097274] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c03f060e-da36-43f2-b3dc-60a703255d41 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.113024] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Created folder: Instances in parent group-v447564. [ 733.113024] env[62627]: DEBUG oslo.service.loopingcall [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 733.113024] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 733.113024] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-552bc76b-bf31-4e4a-88de-0311dee7b26f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.137193] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 733.137193] env[62627]: value = "task-2193957" [ 733.137193] env[62627]: _type = "Task" [ 733.137193] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.147378] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2193957, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.192981] env[62627]: DEBUG oslo_vmware.api [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2193954, 'name': ReconfigVM_Task, 'duration_secs': 0.671224} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.193534] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Reconfigured VM instance instance-00000005 to attach disk [datastore2] ab999825-4310-4fd8-81f0-6bb5dde4175b/ab999825-4310-4fd8-81f0-6bb5dde4175b.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 733.194430] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5d93f8dc-f25f-4f90-a25d-7f43328998bb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.207838] env[62627]: DEBUG oslo_vmware.api [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Waiting for the task: (returnval){ [ 733.207838] env[62627]: value = "task-2193958" [ 733.207838] env[62627]: _type = "Task" [ 733.207838] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.224107] env[62627]: DEBUG oslo_vmware.api [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2193958, 'name': Rename_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.242915] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance e8c87637-4a30-4482-af21-f58ad8a218fc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 733.530739] env[62627]: DEBUG oslo_concurrency.lockutils [None req-630748af-c360-409a-9443-501eb2c40da1 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Lock "a674db15-ea4e-44c8-abc1-6af425286517" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.055s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 733.649097] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2193957, 'name': CreateVM_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.725072] env[62627]: DEBUG oslo_vmware.api [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2193958, 'name': Rename_Task, 'duration_secs': 0.255614} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.727119] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 733.727403] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-25b95495-a732-4c03-863b-0a9dc939f22d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.739502] env[62627]: DEBUG oslo_vmware.api [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Waiting for the task: (returnval){ [ 733.739502] env[62627]: value = "task-2193959" [ 733.739502] env[62627]: _type = "Task" [ 733.739502] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.747617] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 63e884c4-2cc1-431d-8a7f-1c4259b29e1a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 733.757082] env[62627]: DEBUG oslo_vmware.api [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2193959, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.847290] env[62627]: DEBUG oslo_concurrency.lockutils [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Acquiring lock "f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 733.847290] env[62627]: DEBUG oslo_concurrency.lockutils [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Lock "f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 734.150730] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2193957, 'name': CreateVM_Task, 'duration_secs': 0.587289} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.150730] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 734.151036] env[62627]: DEBUG oslo_concurrency.lockutils [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 734.151036] env[62627]: DEBUG oslo_concurrency.lockutils [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 734.154347] env[62627]: DEBUG oslo_concurrency.lockutils [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 734.154347] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-77728c8f-2c14-4f92-80da-27c4efd948c7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.158360] env[62627]: DEBUG oslo_vmware.api [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for the task: (returnval){ [ 734.158360] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52eaa050-cb85-ea29-2615-2965ce3ecd31" [ 734.158360] env[62627]: _type = "Task" [ 734.158360] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.169569] env[62627]: DEBUG oslo_vmware.api [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52eaa050-cb85-ea29-2615-2965ce3ecd31, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.259618] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 7691c2e2-4e43-4990-bf4d-2ef3298a4264 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 734.260039] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=62627) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 734.260039] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2240MB phys_disk=200GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=62627) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 734.263785] env[62627]: DEBUG oslo_vmware.api [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2193959, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.349611] env[62627]: DEBUG nova.compute.manager [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 734.441139] env[62627]: DEBUG nova.network.neutron [req-24f7fc1e-ed90-4ef3-b819-6afa5aa068b6 req-dbe41b51-9f10-4a96-9baa-bcaceb7e43f7 service nova] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Updated VIF entry in instance network info cache for port 81a799c8-db8c-4bae-8609-d4bf5b56c600. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 734.441139] env[62627]: DEBUG nova.network.neutron [req-24f7fc1e-ed90-4ef3-b819-6afa5aa068b6 req-dbe41b51-9f10-4a96-9baa-bcaceb7e43f7 service nova] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Updating instance_info_cache with network_info: [{"id": "81a799c8-db8c-4bae-8609-d4bf5b56c600", "address": "fa:16:3e:1f:8d:65", "network": {"id": "4e8b6953-adb2-4977-8315-179e01e807e0", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1952492535-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14238d8125674b63a7cf4452cb260283", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2907cce-d529-4809-af05-d29397bed211", "external-id": "nsx-vlan-transportzone-427", "segmentation_id": 427, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap81a799c8-db", "ovs_interfaceid": "81a799c8-db8c-4bae-8609-d4bf5b56c600", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 734.496621] env[62627]: DEBUG nova.network.neutron [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: c1fd6afb-9332-446d-a441-5942b60edc94] Successfully updated port: fee6b373-fb16-4c75-8b2d-d4df6e7e1952 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 734.552811] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8064c9b4-eb44-4488-a3a2-9073f0fe6d7a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.565838] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6dcfada-5db9-479a-aaed-2dcf7ce12b3c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.605696] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5203a9c-db1b-480a-a59c-3b0dff361b92 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.616559] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2474a78b-8dc0-46fa-8370-7946a647772a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.635400] env[62627]: DEBUG nova.compute.provider_tree [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 734.678744] env[62627]: DEBUG oslo_vmware.api [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 
tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52eaa050-cb85-ea29-2615-2965ce3ecd31, 'name': SearchDatastore_Task, 'duration_secs': 0.023368} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.679072] env[62627]: DEBUG oslo_concurrency.lockutils [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 734.681025] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 734.681025] env[62627]: DEBUG oslo_concurrency.lockutils [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 734.681025] env[62627]: DEBUG oslo_concurrency.lockutils [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 734.681025] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 734.681025] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-62fc8c27-d525-468b-89e2-3e269a29a67e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.694459] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 734.694795] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 734.695898] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a41d916d-dfc1-406e-b360-345c36953ff4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.703455] env[62627]: DEBUG oslo_vmware.api [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for the task: (returnval){ [ 734.703455] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5221bb27-2e9a-9ee2-13f9-24ce4f69a184" [ 734.703455] env[62627]: _type = "Task" [ 734.703455] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.716734] env[62627]: DEBUG oslo_vmware.api [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5221bb27-2e9a-9ee2-13f9-24ce4f69a184, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.759052] env[62627]: DEBUG oslo_vmware.api [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2193959, 'name': PowerOnVM_Task, 'duration_secs': 0.861817} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.759052] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 734.759052] env[62627]: INFO nova.compute.manager [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Took 12.77 seconds to spawn the instance on the hypervisor. 
[ 734.759460] env[62627]: DEBUG nova.compute.manager [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 734.761292] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cae9a5ba-0d56-4136-8dff-d2ced818e7c5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.806087] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Acquiring lock "a397bce6-30c7-485d-81ab-88a8e021996c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 734.806452] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Lock "a397bce6-30c7-485d-81ab-88a8e021996c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 734.878363] env[62627]: DEBUG oslo_concurrency.lockutils [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 734.943565] env[62627]: DEBUG oslo_concurrency.lockutils [req-24f7fc1e-ed90-4ef3-b819-6afa5aa068b6 req-dbe41b51-9f10-4a96-9baa-bcaceb7e43f7 service nova] Releasing lock "refresh_cache-7366e4b0-bdba-435d-844d-8e81c4464b31" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 734.956582] env[62627]: DEBUG nova.network.neutron [-] [instance: 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 734.999724] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Acquiring lock "refresh_cache-c1fd6afb-9332-446d-a441-5942b60edc94" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 734.999826] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Acquired lock "refresh_cache-c1fd6afb-9332-446d-a441-5942b60edc94" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 735.000173] env[62627]: DEBUG nova.network.neutron [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: c1fd6afb-9332-446d-a441-5942b60edc94] 
Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 735.142027] env[62627]: DEBUG nova.scheduler.client.report [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 735.217128] env[62627]: DEBUG oslo_vmware.api [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5221bb27-2e9a-9ee2-13f9-24ce4f69a184, 'name': SearchDatastore_Task, 'duration_secs': 0.027925} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.217373] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-960912f7-578a-479d-851e-738b0ac6401b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.226571] env[62627]: DEBUG oslo_vmware.api [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for the task: (returnval){ [ 735.226571] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52444a39-aa9d-2671-e692-f88b70449971" [ 735.226571] env[62627]: _type = "Task" [ 735.226571] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.238559] env[62627]: DEBUG oslo_vmware.api [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52444a39-aa9d-2671-e692-f88b70449971, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.287088] env[62627]: INFO nova.compute.manager [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Took 23.67 seconds to build instance. [ 735.459875] env[62627]: INFO nova.compute.manager [-] [instance: 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0] Took 2.50 seconds to deallocate network for instance. 
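The recurring "Acquiring lock ... acquired ... waited Ns ... released ... held Ns" DEBUG entries in the surrounding lines are emitted by oslo.concurrency's lockutils, not by Nova-specific code. A minimal sketch of that pattern, assuming oslo.concurrency is installed; the lock names and the do_build() helper here are hypothetical illustrations, not taken from this log:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('a397bce6-30c7-485d-81ab-88a8e021996c')
    def do_build():
        # Runs only while the named lock is held; lockutils logs the
        # "acquired ... waited" and "released ... held" DEBUG lines with
        # the decorated function's qualified name, as seen above.
        pass

    # The same module also exposes a context-manager form, e.g. for the
    # "compute_resources" lock used by the resource tracker:
    with lockutils.lock('compute_resources'):
        pass
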
[ 735.561747] env[62627]: DEBUG nova.compute.manager [req-61e4c2c1-fdab-410e-ba17-02c58b05a1f1 req-f6c0fa01-1b09-4dbd-82b5-eb8750410c14 service nova] [instance: c1fd6afb-9332-446d-a441-5942b60edc94] Received event network-vif-plugged-fee6b373-fb16-4c75-8b2d-d4df6e7e1952 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 735.561816] env[62627]: DEBUG oslo_concurrency.lockutils [req-61e4c2c1-fdab-410e-ba17-02c58b05a1f1 req-f6c0fa01-1b09-4dbd-82b5-eb8750410c14 service nova] Acquiring lock "c1fd6afb-9332-446d-a441-5942b60edc94-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 735.562069] env[62627]: DEBUG oslo_concurrency.lockutils [req-61e4c2c1-fdab-410e-ba17-02c58b05a1f1 req-f6c0fa01-1b09-4dbd-82b5-eb8750410c14 service nova] Lock "c1fd6afb-9332-446d-a441-5942b60edc94-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 735.562204] env[62627]: DEBUG oslo_concurrency.lockutils [req-61e4c2c1-fdab-410e-ba17-02c58b05a1f1 req-f6c0fa01-1b09-4dbd-82b5-eb8750410c14 service nova] Lock "c1fd6afb-9332-446d-a441-5942b60edc94-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 735.562366] env[62627]: DEBUG nova.compute.manager [req-61e4c2c1-fdab-410e-ba17-02c58b05a1f1 req-f6c0fa01-1b09-4dbd-82b5-eb8750410c14 service nova] [instance: c1fd6afb-9332-446d-a441-5942b60edc94] No waiting events found dispatching network-vif-plugged-fee6b373-fb16-4c75-8b2d-d4df6e7e1952 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 735.562583] env[62627]: WARNING nova.compute.manager [req-61e4c2c1-fdab-410e-ba17-02c58b05a1f1 req-f6c0fa01-1b09-4dbd-82b5-eb8750410c14 service nova] [instance: c1fd6afb-9332-446d-a441-5942b60edc94] Received unexpected event network-vif-plugged-fee6b373-fb16-4c75-8b2d-d4df6e7e1952 for instance with vm_state building and task_state spawning. [ 735.566813] env[62627]: DEBUG nova.network.neutron [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: c1fd6afb-9332-446d-a441-5942b60edc94] Instance cache missing network info. 
{{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 735.648948] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62627) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 735.649582] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 4.960s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 735.649582] env[62627]: DEBUG oslo_concurrency.lockutils [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.110s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 735.652182] env[62627]: INFO nova.compute.claims [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 735.744284] env[62627]: DEBUG oslo_vmware.api [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52444a39-aa9d-2671-e692-f88b70449971, 'name': SearchDatastore_Task, 'duration_secs': 0.031225} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.744590] env[62627]: DEBUG oslo_concurrency.lockutils [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 735.744857] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 7366e4b0-bdba-435d-844d-8e81c4464b31/7366e4b0-bdba-435d-844d-8e81c4464b31.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 735.745202] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-85363e4c-4784-48c9-9604-b7017d3132d3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.756435] env[62627]: DEBUG oslo_vmware.api [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for the task: (returnval){ [ 735.756435] env[62627]: value = "task-2193960" [ 735.756435] env[62627]: _type = "Task" [ 735.756435] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.779808] env[62627]: DEBUG oslo_vmware.api [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2193960, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.791019] env[62627]: DEBUG oslo_concurrency.lockutils [None req-69a0c14a-510b-417d-b71b-41bd6367b703 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Lock "ab999825-4310-4fd8-81f0-6bb5dde4175b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.186s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 735.897464] env[62627]: DEBUG nova.network.neutron [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: c1fd6afb-9332-446d-a441-5942b60edc94] Updating instance_info_cache with network_info: [{"id": "fee6b373-fb16-4c75-8b2d-d4df6e7e1952", "address": "fa:16:3e:b2:8b:41", "network": {"id": "4e8b6953-adb2-4977-8315-179e01e807e0", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1952492535-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14238d8125674b63a7cf4452cb260283", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2907cce-d529-4809-af05-d29397bed211", "external-id": "nsx-vlan-transportzone-427", "segmentation_id": 427, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfee6b373-fb", "ovs_interfaceid": "fee6b373-fb16-4c75-8b2d-d4df6e7e1952", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 735.969694] env[62627]: DEBUG oslo_concurrency.lockutils [None req-096fe3c8-8fc9-44cf-8029-4ac58f2116c8 tempest-DeleteServersAdminTestJSON-184387882 tempest-DeleteServersAdminTestJSON-184387882-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 736.019787] env[62627]: DEBUG nova.network.neutron [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: 77c60059-535a-47ca-a5b0-45cc1df93b45] Successfully updated port: af7ffe26-3161-4ff9-abf9-eb8480443269 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 736.271837] env[62627]: DEBUG oslo_vmware.api [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2193960, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.292531] env[62627]: DEBUG nova.compute.manager [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 736.402095] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Releasing lock "refresh_cache-c1fd6afb-9332-446d-a441-5942b60edc94" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 736.402095] env[62627]: DEBUG nova.compute.manager [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: c1fd6afb-9332-446d-a441-5942b60edc94] Instance network_info: |[{"id": "fee6b373-fb16-4c75-8b2d-d4df6e7e1952", "address": "fa:16:3e:b2:8b:41", "network": {"id": "4e8b6953-adb2-4977-8315-179e01e807e0", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1952492535-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14238d8125674b63a7cf4452cb260283", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2907cce-d529-4809-af05-d29397bed211", "external-id": "nsx-vlan-transportzone-427", "segmentation_id": 427, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfee6b373-fb", "ovs_interfaceid": "fee6b373-fb16-4c75-8b2d-d4df6e7e1952", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 736.402679] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: c1fd6afb-9332-446d-a441-5942b60edc94] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b2:8b:41', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b2907cce-d529-4809-af05-d29397bed211', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fee6b373-fb16-4c75-8b2d-d4df6e7e1952', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 736.410751] env[62627]: DEBUG oslo.service.loopingcall [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 736.411072] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c1fd6afb-9332-446d-a441-5942b60edc94] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 736.411342] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ec61cd04-75ee-4b12-901c-a0ea92193999 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.437311] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 736.437311] env[62627]: value = "task-2193961" [ 736.437311] env[62627]: _type = "Task" [ 736.437311] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.455287] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2193961, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.531682] env[62627]: DEBUG oslo_concurrency.lockutils [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Acquiring lock "refresh_cache-77c60059-535a-47ca-a5b0-45cc1df93b45" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 736.531682] env[62627]: DEBUG oslo_concurrency.lockutils [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Acquired lock "refresh_cache-77c60059-535a-47ca-a5b0-45cc1df93b45" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 736.531682] env[62627]: DEBUG nova.network.neutron [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: 77c60059-535a-47ca-a5b0-45cc1df93b45] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 736.616778] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Acquiring lock "e5712962-ad8e-4cf2-aa47-275293609a13" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 736.617120] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Lock "e5712962-ad8e-4cf2-aa47-275293609a13" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 736.779881] env[62627]: DEBUG oslo_vmware.api [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2193960, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.714862} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.780193] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 7366e4b0-bdba-435d-844d-8e81c4464b31/7366e4b0-bdba-435d-844d-8e81c4464b31.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 736.780459] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 736.781708] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-95f8b0d1-3d10-4f17-9ac0-97e2a5a2621a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.791666] env[62627]: DEBUG oslo_vmware.api [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for the task: (returnval){ [ 736.791666] env[62627]: value = "task-2193962" [ 736.791666] env[62627]: _type = "Task" [ 736.791666] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.817577] env[62627]: DEBUG oslo_vmware.api [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2193962, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.854481] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 736.950902] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2193961, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.043972] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11eedb5f-ebc0-4eb7-9828-446cc3a04def {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.055485] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-464aef44-1a31-4f68-b938-40ecb09e0e6f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.098295] env[62627]: DEBUG nova.network.neutron [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: 77c60059-535a-47ca-a5b0-45cc1df93b45] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 737.102052] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e004e2aa-5026-4991-beaa-d35e0b3a0a27 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.113074] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7426678d-b75c-4c01-a48c-75ac8fce5651 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.129577] env[62627]: DEBUG nova.compute.provider_tree [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 737.269009] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Acquiring lock "15799515-ab7d-4e7c-a6e0-c740baf4f93a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 737.269009] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Lock "15799515-ab7d-4e7c-a6e0-c740baf4f93a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 737.314558] env[62627]: DEBUG oslo_vmware.api [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2193962, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.085775} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.315693] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 737.318922] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd74fc13-2b61-4ee5-a3a2-0a00e6a19e30 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.322777] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Acquiring lock "f09c685d-9103-4c94-a44a-266aacc334f3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 737.322998] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Lock "f09c685d-9103-4c94-a44a-266aacc334f3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 737.350373] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Reconfiguring VM instance instance-00000007 to attach disk [datastore2] 7366e4b0-bdba-435d-844d-8e81c4464b31/7366e4b0-bdba-435d-844d-8e81c4464b31.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 737.351364] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d3e92956-f66f-464d-8a67-3fb74dbe76b9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.373441] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Acquiring lock "76fb58dc-df20-464b-bda0-dcf09d69c718" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 737.373661] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Lock "76fb58dc-df20-464b-bda0-dcf09d69c718" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 737.375909] env[62627]: DEBUG oslo_vmware.api [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 
tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for the task: (returnval){ [ 737.375909] env[62627]: value = "task-2193963" [ 737.375909] env[62627]: _type = "Task" [ 737.375909] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.386878] env[62627]: DEBUG oslo_vmware.api [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2193963, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.399596] env[62627]: DEBUG nova.network.neutron [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: 77c60059-535a-47ca-a5b0-45cc1df93b45] Updating instance_info_cache with network_info: [{"id": "af7ffe26-3161-4ff9-abf9-eb8480443269", "address": "fa:16:3e:15:cb:74", "network": {"id": "d75e7b13-f4ab-4626-9799-4970d691f831", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-781733128-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca351451d2344fc1872f6faf89412354", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c5f8c7b-74a0-43f7-87b4-3c38dd26ac3e", "external-id": "nsx-vlan-transportzone-988", "segmentation_id": 988, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf7ffe26-31", "ovs_interfaceid": "af7ffe26-3161-4ff9-abf9-eb8480443269", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 737.453563] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2193961, 'name': CreateVM_Task, 'duration_secs': 0.619163} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.453887] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c1fd6afb-9332-446d-a441-5942b60edc94] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 737.454511] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 737.454663] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 737.454987] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 737.455333] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e030b9c8-6f2d-4455-aab3-a40c9118ac35 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.461435] env[62627]: DEBUG oslo_vmware.api [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for the task: (returnval){ [ 737.461435] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d7d605-563b-739b-1dbd-f7964be41162" [ 737.461435] env[62627]: _type = "Task" [ 737.461435] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.471352] env[62627]: DEBUG oslo_vmware.api [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d7d605-563b-739b-1dbd-f7964be41162, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.633196] env[62627]: DEBUG nova.scheduler.client.report [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 737.768339] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Acquiring lock "fffa23fe-3ce2-4879-b72d-11275d298504" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 737.768339] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Lock "fffa23fe-3ce2-4879-b72d-11275d298504" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 737.871490] env[62627]: DEBUG oslo_concurrency.lockutils [None req-eb814016-dead-4c7a-bc89-ac22a708eba5 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Acquiring lock "f15982c3-23ed-4f25-bf55-3418cac117cc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 737.872156] env[62627]: DEBUG oslo_concurrency.lockutils [None req-eb814016-dead-4c7a-bc89-ac22a708eba5 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Lock "f15982c3-23ed-4f25-bf55-3418cac117cc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 737.872156] env[62627]: DEBUG oslo_concurrency.lockutils [None req-eb814016-dead-4c7a-bc89-ac22a708eba5 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Acquiring lock "f15982c3-23ed-4f25-bf55-3418cac117cc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 737.872301] env[62627]: DEBUG oslo_concurrency.lockutils [None req-eb814016-dead-4c7a-bc89-ac22a708eba5 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Lock "f15982c3-23ed-4f25-bf55-3418cac117cc-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 737.872594] env[62627]: DEBUG oslo_concurrency.lockutils [None req-eb814016-dead-4c7a-bc89-ac22a708eba5 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Lock "f15982c3-23ed-4f25-bf55-3418cac117cc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 737.874986] env[62627]: INFO nova.compute.manager [None req-eb814016-dead-4c7a-bc89-ac22a708eba5 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] [instance: f15982c3-23ed-4f25-bf55-3418cac117cc] Terminating instance [ 737.892304] env[62627]: DEBUG oslo_vmware.api [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2193963, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.908072] env[62627]: DEBUG oslo_concurrency.lockutils [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Releasing lock "refresh_cache-77c60059-535a-47ca-a5b0-45cc1df93b45" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 737.908072] env[62627]: DEBUG nova.compute.manager [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: 77c60059-535a-47ca-a5b0-45cc1df93b45] Instance network_info: |[{"id": "af7ffe26-3161-4ff9-abf9-eb8480443269", "address": "fa:16:3e:15:cb:74", "network": {"id": "d75e7b13-f4ab-4626-9799-4970d691f831", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-781733128-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca351451d2344fc1872f6faf89412354", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c5f8c7b-74a0-43f7-87b4-3c38dd26ac3e", "external-id": "nsx-vlan-transportzone-988", "segmentation_id": 988, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf7ffe26-31", "ovs_interfaceid": "af7ffe26-3161-4ff9-abf9-eb8480443269", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 737.908996] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: 77c60059-535a-47ca-a5b0-45cc1df93b45] Instance VIF info [{'network_name': 'br-int', 
'mac_address': 'fa:16:3e:15:cb:74', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3c5f8c7b-74a0-43f7-87b4-3c38dd26ac3e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'af7ffe26-3161-4ff9-abf9-eb8480443269', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 737.916343] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Creating folder: Project (ca351451d2344fc1872f6faf89412354). Parent ref: group-v447541. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 737.917507] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5689c349-78a5-4329-b088-4d03c7f7f99f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.933332] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Created folder: Project (ca351451d2344fc1872f6faf89412354) in parent group-v447541. [ 737.933501] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Creating folder: Instances. Parent ref: group-v447568. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 737.933789] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d1b4caec-f4e3-405b-9b04-3d2fdce3a9ae {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.947365] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Created folder: Instances in parent group-v447568. [ 737.947365] env[62627]: DEBUG oslo.service.loopingcall [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 737.947588] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 77c60059-535a-47ca-a5b0-45cc1df93b45] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 737.948136] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-53e0e6c3-02b9-4e27-bf5f-34c57374e148 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.976597] env[62627]: DEBUG oslo_vmware.api [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d7d605-563b-739b-1dbd-f7964be41162, 'name': SearchDatastore_Task, 'duration_secs': 0.023093} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.977964] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 737.978943] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: c1fd6afb-9332-446d-a441-5942b60edc94] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 737.978943] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 737.978943] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 737.978943] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 737.979186] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 737.979186] env[62627]: value = "task-2193966" [ 737.979186] env[62627]: _type = "Task" [ 737.979186] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.979460] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8d49842a-7be4-4417-ab38-2075a5ea9e29 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.990044] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2193966, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.004420] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 738.004420] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 738.004420] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6b674bea-8853-49ee-bb5b-c07f072aea3e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.011666] env[62627]: DEBUG oslo_vmware.api [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for the task: (returnval){ [ 738.011666] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52480979-c539-7ec3-1ae4-e30065318fc3" [ 738.011666] env[62627]: _type = "Task" [ 738.011666] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.022360] env[62627]: DEBUG oslo_vmware.api [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52480979-c539-7ec3-1ae4-e30065318fc3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.143687] env[62627]: DEBUG oslo_concurrency.lockutils [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.493s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 738.144397] env[62627]: DEBUG nova.compute.manager [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Start building networks asynchronously for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 738.152023] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.765s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 738.152023] env[62627]: INFO nova.compute.claims [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 33bf3932-3746-445b-b0fe-611466cdd9b1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 738.269726] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Acquiring lock "83998771-97a7-4458-834e-df1e4b2914ee" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 738.269851] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Lock "83998771-97a7-4458-834e-df1e4b2914ee" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 738.382021] env[62627]: DEBUG nova.compute.manager [None req-eb814016-dead-4c7a-bc89-ac22a708eba5 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] [instance: f15982c3-23ed-4f25-bf55-3418cac117cc] Start destroying the instance on the hypervisor. {{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 738.382021] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-eb814016-dead-4c7a-bc89-ac22a708eba5 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] [instance: f15982c3-23ed-4f25-bf55-3418cac117cc] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 738.382021] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d4542f6-8369-4747-b242-c5b44fae4201 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.403867] env[62627]: DEBUG oslo_vmware.api [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2193963, 'name': ReconfigVM_Task, 'duration_secs': 0.553867} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.404907] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb814016-dead-4c7a-bc89-ac22a708eba5 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] [instance: f15982c3-23ed-4f25-bf55-3418cac117cc] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 738.404907] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Reconfigured VM instance instance-00000007 to attach disk [datastore2] 7366e4b0-bdba-435d-844d-8e81c4464b31/7366e4b0-bdba-435d-844d-8e81c4464b31.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 738.405213] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ede66024-ad53-4899-bd71-3c807c731016 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.407071] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-93e62e77-c6ce-43fd-906f-403ef54e4cc0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.415454] env[62627]: DEBUG oslo_vmware.api [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for the task: (returnval){ [ 738.415454] env[62627]: value = "task-2193967" [ 738.415454] env[62627]: _type = "Task" [ 738.415454] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.417039] env[62627]: DEBUG oslo_vmware.api [None req-eb814016-dead-4c7a-bc89-ac22a708eba5 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Waiting for the task: (returnval){ [ 738.417039] env[62627]: value = "task-2193968" [ 738.417039] env[62627]: _type = "Task" [ 738.417039] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.436329] env[62627]: DEBUG oslo_vmware.api [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2193967, 'name': Rename_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.436742] env[62627]: DEBUG oslo_vmware.api [None req-eb814016-dead-4c7a-bc89-ac22a708eba5 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Task: {'id': task-2193968, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.493145] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2193966, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.525411] env[62627]: DEBUG oslo_vmware.api [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52480979-c539-7ec3-1ae4-e30065318fc3, 'name': SearchDatastore_Task, 'duration_secs': 0.030763} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.527660] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bbc5dbb9-2997-487b-80b2-2734f1362a61 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.535150] env[62627]: DEBUG oslo_vmware.api [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for the task: (returnval){ [ 738.535150] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52a3f5e4-c7e4-bfde-6db2-144292ae7956" [ 738.535150] env[62627]: _type = "Task" [ 738.535150] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.547913] env[62627]: DEBUG oslo_vmware.api [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52a3f5e4-c7e4-bfde-6db2-144292ae7956, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.658395] env[62627]: DEBUG nova.compute.utils [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 738.659603] env[62627]: DEBUG nova.compute.manager [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Allocating IP information in the background. 
{{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 738.661059] env[62627]: DEBUG nova.network.neutron [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 738.726737] env[62627]: DEBUG nova.policy [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5ee59d5815f8463f95ff72104f77272c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '65ae6a3964c44bcbae72d15466f6467e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 738.939312] env[62627]: DEBUG oslo_vmware.api [None req-eb814016-dead-4c7a-bc89-ac22a708eba5 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Task: {'id': task-2193968, 'name': PowerOffVM_Task, 'duration_secs': 0.422208} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.940103] env[62627]: DEBUG oslo_vmware.api [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2193967, 'name': Rename_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.940526] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb814016-dead-4c7a-bc89-ac22a708eba5 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] [instance: f15982c3-23ed-4f25-bf55-3418cac117cc] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 738.940848] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-eb814016-dead-4c7a-bc89-ac22a708eba5 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] [instance: f15982c3-23ed-4f25-bf55-3418cac117cc] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 738.941206] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8d555a8a-2cba-4f3b-8b2c-954a515de99a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.996452] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2193966, 'name': CreateVM_Task, 'duration_secs': 0.608358} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.996452] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 77c60059-535a-47ca-a5b0-45cc1df93b45] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 738.997056] env[62627]: DEBUG oslo_concurrency.lockutils [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 738.997250] env[62627]: DEBUG oslo_concurrency.lockutils [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 738.997572] env[62627]: DEBUG oslo_concurrency.lockutils [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 738.997825] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-48882ddf-3c74-48cf-a11c-5084822272fe {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.003677] env[62627]: DEBUG oslo_vmware.api [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Waiting for the task: (returnval){ [ 739.003677] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52b1747b-2ba8-a811-6187-c97a1a29d160" [ 739.003677] env[62627]: _type = "Task" [ 739.003677] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.014506] env[62627]: DEBUG oslo_vmware.api [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52b1747b-2ba8-a811-6187-c97a1a29d160, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.021421] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-eb814016-dead-4c7a-bc89-ac22a708eba5 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] [instance: f15982c3-23ed-4f25-bf55-3418cac117cc] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 739.021686] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-eb814016-dead-4c7a-bc89-ac22a708eba5 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] [instance: f15982c3-23ed-4f25-bf55-3418cac117cc] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 739.021885] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb814016-dead-4c7a-bc89-ac22a708eba5 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Deleting the datastore file [datastore2] f15982c3-23ed-4f25-bf55-3418cac117cc {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 739.022240] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-31ec5f60-26ea-4de2-94d3-e0e75c185ed2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.031162] env[62627]: DEBUG oslo_vmware.api [None req-eb814016-dead-4c7a-bc89-ac22a708eba5 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Waiting for the task: (returnval){ [ 739.031162] env[62627]: value = "task-2193970" [ 739.031162] env[62627]: _type = "Task" [ 739.031162] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.050068] env[62627]: DEBUG oslo_vmware.api [None req-eb814016-dead-4c7a-bc89-ac22a708eba5 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Task: {'id': task-2193970, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.050340] env[62627]: DEBUG oslo_vmware.api [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52a3f5e4-c7e4-bfde-6db2-144292ae7956, 'name': SearchDatastore_Task, 'duration_secs': 0.014102} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.050578] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 739.050834] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] c1fd6afb-9332-446d-a441-5942b60edc94/c1fd6afb-9332-446d-a441-5942b60edc94.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 739.051082] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cf34cfe6-bb59-43ed-b4ad-322c9725690a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.059027] env[62627]: DEBUG oslo_vmware.api [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for the task: (returnval){ [ 739.059027] env[62627]: value = "task-2193971" [ 739.059027] env[62627]: _type = "Task" [ 739.059027] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.067636] env[62627]: DEBUG oslo_vmware.api [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2193971, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.111927] env[62627]: DEBUG nova.compute.manager [req-8b0d1c59-4a7d-40fc-9be4-28d70fca98a3 req-0fe532d6-d8e0-4d89-93a5-49170911f316 service nova] [instance: 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0] Received event network-vif-deleted-a1a51631-0dc2-427d-a041-f4c9f944ea3d {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 739.112479] env[62627]: DEBUG nova.compute.manager [req-8b0d1c59-4a7d-40fc-9be4-28d70fca98a3 req-0fe532d6-d8e0-4d89-93a5-49170911f316 service nova] [instance: c1fd6afb-9332-446d-a441-5942b60edc94] Received event network-changed-fee6b373-fb16-4c75-8b2d-d4df6e7e1952 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 739.112790] env[62627]: DEBUG nova.compute.manager [req-8b0d1c59-4a7d-40fc-9be4-28d70fca98a3 req-0fe532d6-d8e0-4d89-93a5-49170911f316 service nova] [instance: c1fd6afb-9332-446d-a441-5942b60edc94] Refreshing instance network info cache due to event network-changed-fee6b373-fb16-4c75-8b2d-d4df6e7e1952. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 739.113119] env[62627]: DEBUG oslo_concurrency.lockutils [req-8b0d1c59-4a7d-40fc-9be4-28d70fca98a3 req-0fe532d6-d8e0-4d89-93a5-49170911f316 service nova] Acquiring lock "refresh_cache-c1fd6afb-9332-446d-a441-5942b60edc94" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 739.113419] env[62627]: DEBUG oslo_concurrency.lockutils [req-8b0d1c59-4a7d-40fc-9be4-28d70fca98a3 req-0fe532d6-d8e0-4d89-93a5-49170911f316 service nova] Acquired lock "refresh_cache-c1fd6afb-9332-446d-a441-5942b60edc94" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 739.113481] env[62627]: DEBUG nova.network.neutron [req-8b0d1c59-4a7d-40fc-9be4-28d70fca98a3 req-0fe532d6-d8e0-4d89-93a5-49170911f316 service nova] [instance: c1fd6afb-9332-446d-a441-5942b60edc94] Refreshing network info cache for port fee6b373-fb16-4c75-8b2d-d4df6e7e1952 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 739.166574] env[62627]: DEBUG nova.compute.manager [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 739.241711] env[62627]: DEBUG nova.network.neutron [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Successfully created port: 32dda461-7395-4104-9323-4a39535875de {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 739.443590] env[62627]: DEBUG oslo_vmware.api [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2193967, 'name': Rename_Task, 'duration_secs': 0.731199} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.448470] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 739.448470] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e10b90d7-2dd6-4e2c-932a-5797b2c0a58a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.459995] env[62627]: DEBUG oslo_vmware.api [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for the task: (returnval){ [ 739.459995] env[62627]: value = "task-2193972" [ 739.459995] env[62627]: _type = "Task" [ 739.459995] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.472804] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Acquiring lock "2cfcbc3b-c2b9-4155-a48a-0e73eea0589e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 739.473190] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Lock "2cfcbc3b-c2b9-4155-a48a-0e73eea0589e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 739.483069] env[62627]: DEBUG oslo_vmware.api [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2193972, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.517166] env[62627]: DEBUG oslo_vmware.api [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52b1747b-2ba8-a811-6187-c97a1a29d160, 'name': SearchDatastore_Task, 'duration_secs': 0.010651} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.517681] env[62627]: DEBUG oslo_concurrency.lockutils [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 739.517948] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: 77c60059-535a-47ca-a5b0-45cc1df93b45] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 739.518282] env[62627]: DEBUG oslo_concurrency.lockutils [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 739.518487] env[62627]: DEBUG oslo_concurrency.lockutils [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 739.518768] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 739.519145] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6cbcf097-4902-4cdd-b0e4-1ec879ec02ae {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.533628] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 739.533628] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 739.533628] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1af976d4-a929-4f61-aec1-30bda2a0a6de {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.548995] env[62627]: DEBUG oslo_vmware.api [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Waiting for the task: (returnval){ [ 739.548995] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52918f42-781e-df84-ad9a-22c2945ab138" [ 739.548995] env[62627]: _type = "Task" [ 739.548995] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.549240] env[62627]: DEBUG oslo_vmware.api [None req-eb814016-dead-4c7a-bc89-ac22a708eba5 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Task: {'id': task-2193970, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.163858} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.549887] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb814016-dead-4c7a-bc89-ac22a708eba5 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 739.550103] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-eb814016-dead-4c7a-bc89-ac22a708eba5 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] [instance: f15982c3-23ed-4f25-bf55-3418cac117cc] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 739.550261] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-eb814016-dead-4c7a-bc89-ac22a708eba5 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] [instance: f15982c3-23ed-4f25-bf55-3418cac117cc] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 739.550437] env[62627]: INFO nova.compute.manager [None req-eb814016-dead-4c7a-bc89-ac22a708eba5 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] [instance: f15982c3-23ed-4f25-bf55-3418cac117cc] Took 1.17 seconds to destroy the instance on the hypervisor. [ 739.550671] env[62627]: DEBUG oslo.service.loopingcall [None req-eb814016-dead-4c7a-bc89-ac22a708eba5 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 739.553991] env[62627]: DEBUG nova.compute.manager [-] [instance: f15982c3-23ed-4f25-bf55-3418cac117cc] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 739.554148] env[62627]: DEBUG nova.network.neutron [-] [instance: f15982c3-23ed-4f25-bf55-3418cac117cc] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 739.563491] env[62627]: DEBUG oslo_vmware.api [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52918f42-781e-df84-ad9a-22c2945ab138, 'name': SearchDatastore_Task, 'duration_secs': 0.011466} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.571066] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c77a5528-01e2-463d-a323-ead0ecd30d2f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.580759] env[62627]: DEBUG oslo_vmware.api [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2193971, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.582349] env[62627]: DEBUG oslo_vmware.api [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Waiting for the task: (returnval){ [ 739.582349] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52dc84c1-5950-4933-2d3b-d78cb9ea0a55" [ 739.582349] env[62627]: _type = "Task" [ 739.582349] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.594973] env[62627]: DEBUG oslo_vmware.api [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52dc84c1-5950-4933-2d3b-d78cb9ea0a55, 'name': SearchDatastore_Task, 'duration_secs': 0.012031} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.595292] env[62627]: DEBUG oslo_concurrency.lockutils [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 739.595546] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 77c60059-535a-47ca-a5b0-45cc1df93b45/77c60059-535a-47ca-a5b0-45cc1df93b45.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 739.595821] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-69d3dbc1-6424-4ebd-b422-4591e876fbfb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.606980] env[62627]: DEBUG oslo_vmware.api [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Waiting for the task: (returnval){ [ 739.606980] env[62627]: value = "task-2193973" [ 739.606980] env[62627]: _type = "Task" [ 739.606980] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.619963] env[62627]: DEBUG oslo_vmware.api [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Task: {'id': task-2193973, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.621546] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-391fe160-3c54-4c04-bddb-b1ab4662b427 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.630681] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e5e2b72-4de3-487a-93f9-55238753f5bc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.669093] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35bc3822-a363-467b-9f1a-9886c1368e24 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.685283] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fae72100-8682-457d-b859-30a1b4ffa945 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.704494] env[62627]: DEBUG nova.compute.provider_tree [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 739.971177] env[62627]: DEBUG oslo_vmware.api [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2193972, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.075094] env[62627]: DEBUG oslo_vmware.api [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2193971, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.950045} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.075913] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] c1fd6afb-9332-446d-a441-5942b60edc94/c1fd6afb-9332-446d-a441-5942b60edc94.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 740.076225] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: c1fd6afb-9332-446d-a441-5942b60edc94] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 740.076660] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-633fdcb9-0927-43fb-9b30-d67ca6bca58b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.087055] env[62627]: DEBUG oslo_vmware.api [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for the task: (returnval){ [ 740.087055] env[62627]: value = "task-2193974" [ 740.087055] env[62627]: _type = "Task" [ 740.087055] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.099226] env[62627]: DEBUG oslo_vmware.api [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2193974, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.119463] env[62627]: DEBUG oslo_vmware.api [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Task: {'id': task-2193973, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.182443] env[62627]: DEBUG nova.compute.manager [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Start spawning the instance on the hypervisor. 
{{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 740.206940] env[62627]: DEBUG nova.scheduler.client.report [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 740.222089] env[62627]: DEBUG nova.virt.hardware [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 740.222487] env[62627]: DEBUG nova.virt.hardware [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 740.222560] env[62627]: DEBUG nova.virt.hardware [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 740.222781] env[62627]: DEBUG nova.virt.hardware [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 740.222988] env[62627]: DEBUG nova.virt.hardware [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 740.225143] env[62627]: DEBUG nova.virt.hardware [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:434}} [ 740.227806] env[62627]: DEBUG nova.virt.hardware [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 740.227806] env[62627]: DEBUG nova.virt.hardware [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 740.227806] env[62627]: DEBUG nova.virt.hardware [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 740.227806] env[62627]: DEBUG nova.virt.hardware [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 740.227806] env[62627]: DEBUG nova.virt.hardware [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 740.230444] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87a24ed9-9593-4f57-ad32-b1ee9a2a51d4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.242777] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3da1faac-59a3-4f38-a12a-d3f3ad8bd014 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.326306] env[62627]: DEBUG nova.network.neutron [req-8b0d1c59-4a7d-40fc-9be4-28d70fca98a3 req-0fe532d6-d8e0-4d89-93a5-49170911f316 service nova] [instance: c1fd6afb-9332-446d-a441-5942b60edc94] Updated VIF entry in instance network info cache for port fee6b373-fb16-4c75-8b2d-d4df6e7e1952. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 740.326950] env[62627]: DEBUG nova.network.neutron [req-8b0d1c59-4a7d-40fc-9be4-28d70fca98a3 req-0fe532d6-d8e0-4d89-93a5-49170911f316 service nova] [instance: c1fd6afb-9332-446d-a441-5942b60edc94] Updating instance_info_cache with network_info: [{"id": "fee6b373-fb16-4c75-8b2d-d4df6e7e1952", "address": "fa:16:3e:b2:8b:41", "network": {"id": "4e8b6953-adb2-4977-8315-179e01e807e0", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1952492535-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14238d8125674b63a7cf4452cb260283", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2907cce-d529-4809-af05-d29397bed211", "external-id": "nsx-vlan-transportzone-427", "segmentation_id": 427, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfee6b373-fb", "ovs_interfaceid": "fee6b373-fb16-4c75-8b2d-d4df6e7e1952", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 740.402788] env[62627]: DEBUG nova.compute.manager [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Stashing vm_state: active {{(pid=62627) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 740.474682] env[62627]: DEBUG oslo_vmware.api [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2193972, 'name': PowerOnVM_Task, 'duration_secs': 0.739197} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.479020] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 740.479020] env[62627]: INFO nova.compute.manager [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Took 13.70 seconds to spawn the instance on the hypervisor. 
[ 740.479020] env[62627]: DEBUG nova.compute.manager [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 740.479020] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fed8feac-f064-4de1-9677-3fbbef2d332d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.603058] env[62627]: DEBUG oslo_vmware.api [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2193974, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.161935} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.603739] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: c1fd6afb-9332-446d-a441-5942b60edc94] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 740.607030] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f57163d-da63-468b-84e5-244a47d837c5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.629044] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: c1fd6afb-9332-446d-a441-5942b60edc94] Reconfiguring VM instance instance-00000008 to attach disk [datastore2] c1fd6afb-9332-446d-a441-5942b60edc94/c1fd6afb-9332-446d-a441-5942b60edc94.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 740.629821] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-28032d18-5b82-4d33-a162-60067191ba96 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.652068] env[62627]: DEBUG oslo_vmware.api [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Task: {'id': task-2193973, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.803564} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.652623] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 77c60059-535a-47ca-a5b0-45cc1df93b45/77c60059-535a-47ca-a5b0-45cc1df93b45.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 740.652882] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: 77c60059-535a-47ca-a5b0-45cc1df93b45] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 740.653508] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-33b2cdff-eed4-4641-8cd7-a8b5f7fcdce8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.659491] env[62627]: DEBUG oslo_vmware.api [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for the task: (returnval){ [ 740.659491] env[62627]: value = "task-2193975" [ 740.659491] env[62627]: _type = "Task" [ 740.659491] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.664941] env[62627]: DEBUG oslo_vmware.api [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Waiting for the task: (returnval){ [ 740.664941] env[62627]: value = "task-2193976" [ 740.664941] env[62627]: _type = "Task" [ 740.664941] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.672658] env[62627]: DEBUG oslo_vmware.api [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2193975, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.683169] env[62627]: DEBUG oslo_vmware.api [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Task: {'id': task-2193976, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.715006] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.567s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 740.715568] env[62627]: DEBUG nova.compute.manager [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 33bf3932-3746-445b-b0fe-611466cdd9b1] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 740.718084] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.757s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 740.721139] env[62627]: INFO nova.compute.claims [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: e8c87637-4a30-4482-af21-f58ad8a218fc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 740.832776] env[62627]: DEBUG oslo_concurrency.lockutils [req-8b0d1c59-4a7d-40fc-9be4-28d70fca98a3 req-0fe532d6-d8e0-4d89-93a5-49170911f316 service nova] Releasing lock "refresh_cache-c1fd6afb-9332-446d-a441-5942b60edc94" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 740.833109] env[62627]: DEBUG nova.compute.manager [req-8b0d1c59-4a7d-40fc-9be4-28d70fca98a3 req-0fe532d6-d8e0-4d89-93a5-49170911f316 service nova] [instance: 77c60059-535a-47ca-a5b0-45cc1df93b45] Received event network-vif-plugged-af7ffe26-3161-4ff9-abf9-eb8480443269 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 740.833359] env[62627]: DEBUG oslo_concurrency.lockutils [req-8b0d1c59-4a7d-40fc-9be4-28d70fca98a3 req-0fe532d6-d8e0-4d89-93a5-49170911f316 service nova] Acquiring lock "77c60059-535a-47ca-a5b0-45cc1df93b45-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 740.833620] env[62627]: DEBUG oslo_concurrency.lockutils [req-8b0d1c59-4a7d-40fc-9be4-28d70fca98a3 req-0fe532d6-d8e0-4d89-93a5-49170911f316 service nova] Lock "77c60059-535a-47ca-a5b0-45cc1df93b45-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 740.833804] env[62627]: DEBUG oslo_concurrency.lockutils [req-8b0d1c59-4a7d-40fc-9be4-28d70fca98a3 req-0fe532d6-d8e0-4d89-93a5-49170911f316 service nova] Lock "77c60059-535a-47ca-a5b0-45cc1df93b45-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 740.834034] env[62627]: DEBUG nova.compute.manager [req-8b0d1c59-4a7d-40fc-9be4-28d70fca98a3 req-0fe532d6-d8e0-4d89-93a5-49170911f316 service nova] [instance: 77c60059-535a-47ca-a5b0-45cc1df93b45] No waiting events found dispatching network-vif-plugged-af7ffe26-3161-4ff9-abf9-eb8480443269 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 740.834297] env[62627]: WARNING nova.compute.manager [req-8b0d1c59-4a7d-40fc-9be4-28d70fca98a3 req-0fe532d6-d8e0-4d89-93a5-49170911f316 service nova] [instance: 77c60059-535a-47ca-a5b0-45cc1df93b45] Received unexpected event network-vif-plugged-af7ffe26-3161-4ff9-abf9-eb8480443269 for instance with vm_state building and task_state spawning. [ 740.834512] env[62627]: DEBUG nova.compute.manager [req-8b0d1c59-4a7d-40fc-9be4-28d70fca98a3 req-0fe532d6-d8e0-4d89-93a5-49170911f316 service nova] [instance: 77c60059-535a-47ca-a5b0-45cc1df93b45] Received event network-changed-af7ffe26-3161-4ff9-abf9-eb8480443269 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 740.834679] env[62627]: DEBUG nova.compute.manager [req-8b0d1c59-4a7d-40fc-9be4-28d70fca98a3 req-0fe532d6-d8e0-4d89-93a5-49170911f316 service nova] [instance: 77c60059-535a-47ca-a5b0-45cc1df93b45] Refreshing instance network info cache due to event network-changed-af7ffe26-3161-4ff9-abf9-eb8480443269. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 740.835408] env[62627]: DEBUG oslo_concurrency.lockutils [req-8b0d1c59-4a7d-40fc-9be4-28d70fca98a3 req-0fe532d6-d8e0-4d89-93a5-49170911f316 service nova] Acquiring lock "refresh_cache-77c60059-535a-47ca-a5b0-45cc1df93b45" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 740.835563] env[62627]: DEBUG oslo_concurrency.lockutils [req-8b0d1c59-4a7d-40fc-9be4-28d70fca98a3 req-0fe532d6-d8e0-4d89-93a5-49170911f316 service nova] Acquired lock "refresh_cache-77c60059-535a-47ca-a5b0-45cc1df93b45" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 740.835790] env[62627]: DEBUG nova.network.neutron [req-8b0d1c59-4a7d-40fc-9be4-28d70fca98a3 req-0fe532d6-d8e0-4d89-93a5-49170911f316 service nova] [instance: 77c60059-535a-47ca-a5b0-45cc1df93b45] Refreshing network info cache for port af7ffe26-3161-4ff9-abf9-eb8480443269 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 740.889624] env[62627]: DEBUG nova.network.neutron [-] [instance: f15982c3-23ed-4f25-bf55-3418cac117cc] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 740.939271] env[62627]: DEBUG oslo_concurrency.lockutils [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 741.003135] env[62627]: INFO nova.compute.manager [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Took 25.84 seconds to build instance. 
[ 741.173506] env[62627]: DEBUG oslo_vmware.api [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2193975, 'name': ReconfigVM_Task, 'duration_secs': 0.4412} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.174290] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: c1fd6afb-9332-446d-a441-5942b60edc94] Reconfigured VM instance instance-00000008 to attach disk [datastore2] c1fd6afb-9332-446d-a441-5942b60edc94/c1fd6afb-9332-446d-a441-5942b60edc94.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 741.174846] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-de0b41b3-c40f-4243-84e3-2d6f8b150659 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.183025] env[62627]: DEBUG oslo_vmware.api [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Task: {'id': task-2193976, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081895} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.183025] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: 77c60059-535a-47ca-a5b0-45cc1df93b45] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 741.183025] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fffdbb76-8752-4b69-a5ff-4e5c88bbd730 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.185350] env[62627]: DEBUG oslo_vmware.api [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for the task: (returnval){ [ 741.185350] env[62627]: value = "task-2193977" [ 741.185350] env[62627]: _type = "Task" [ 741.185350] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.207852] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: 77c60059-535a-47ca-a5b0-45cc1df93b45] Reconfiguring VM instance instance-00000009 to attach disk [datastore2] 77c60059-535a-47ca-a5b0-45cc1df93b45/77c60059-535a-47ca-a5b0-45cc1df93b45.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 741.208239] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-05c237c2-958a-4a52-8556-b7dc20f107c2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.229580] env[62627]: DEBUG nova.compute.utils [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 741.231416] env[62627]: DEBUG oslo_vmware.api [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2193977, 'name': Rename_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.235977] env[62627]: DEBUG nova.compute.manager [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 33bf3932-3746-445b-b0fe-611466cdd9b1] Allocating IP information in the background. {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 741.235977] env[62627]: DEBUG nova.network.neutron [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 33bf3932-3746-445b-b0fe-611466cdd9b1] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 741.237789] env[62627]: DEBUG oslo_vmware.api [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Waiting for the task: (returnval){ [ 741.237789] env[62627]: value = "task-2193978" [ 741.237789] env[62627]: _type = "Task" [ 741.237789] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.254839] env[62627]: DEBUG oslo_vmware.api [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Task: {'id': task-2193978, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.304630] env[62627]: DEBUG nova.policy [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f9511c993533466faa59ca47d51b2964', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8fe72f9f87144786adbb917cf0da1b19', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 741.392960] env[62627]: INFO nova.compute.manager [-] [instance: f15982c3-23ed-4f25-bf55-3418cac117cc] Took 1.84 seconds to deallocate network for instance. [ 741.411121] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Acquiring lock "faa16f10-63f0-457c-8cd7-20a624454d86" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 741.411121] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Lock "faa16f10-63f0-457c-8cd7-20a624454d86" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 741.506223] env[62627]: DEBUG oslo_concurrency.lockutils [None req-aacaf878-b43b-40d9-b95b-45d7cca8f499 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Lock "7366e4b0-bdba-435d-844d-8e81c4464b31" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.354s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 741.698396] env[62627]: DEBUG oslo_vmware.api [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2193977, 'name': Rename_Task, 'duration_secs': 0.15397} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.699821] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: c1fd6afb-9332-446d-a441-5942b60edc94] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 741.699821] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6c9ab474-7f04-4936-b538-74469e7e8eda {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.711063] env[62627]: DEBUG oslo_vmware.api [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for the task: (returnval){ [ 741.711063] env[62627]: value = "task-2193979" [ 741.711063] env[62627]: _type = "Task" [ 741.711063] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.723416] env[62627]: DEBUG oslo_vmware.api [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2193979, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.738425] env[62627]: DEBUG nova.compute.manager [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 33bf3932-3746-445b-b0fe-611466cdd9b1] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 741.758151] env[62627]: DEBUG oslo_vmware.api [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Task: {'id': task-2193978, 'name': ReconfigVM_Task, 'duration_secs': 0.334855} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.758453] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: 77c60059-535a-47ca-a5b0-45cc1df93b45] Reconfigured VM instance instance-00000009 to attach disk [datastore2] 77c60059-535a-47ca-a5b0-45cc1df93b45/77c60059-535a-47ca-a5b0-45cc1df93b45.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 741.759674] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9af0214e-9116-43f0-8272-01dc132e0019 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.769812] env[62627]: DEBUG oslo_vmware.api [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Waiting for the task: (returnval){ [ 741.769812] env[62627]: value = "task-2193980" [ 741.769812] env[62627]: _type = "Task" [ 741.769812] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.783992] env[62627]: DEBUG oslo_vmware.api [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Task: {'id': task-2193980, 'name': Rename_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.834905] env[62627]: DEBUG nova.compute.manager [req-71904a41-acc0-44d6-a9f3-4e574ffea13f req-287a55f7-8c85-4c5d-8a37-c7d73a701e10 service nova] [instance: f15982c3-23ed-4f25-bf55-3418cac117cc] Received event network-vif-deleted-db80642f-6b36-430d-badf-9ba7dacbbad6 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 741.847987] env[62627]: DEBUG nova.network.neutron [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Successfully updated port: 32dda461-7395-4104-9323-4a39535875de {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 741.904823] env[62627]: DEBUG oslo_concurrency.lockutils [None req-eb814016-dead-4c7a-bc89-ac22a708eba5 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 742.013037] env[62627]: DEBUG nova.compute.manager [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 742.195631] env[62627]: DEBUG nova.network.neutron [req-8b0d1c59-4a7d-40fc-9be4-28d70fca98a3 req-0fe532d6-d8e0-4d89-93a5-49170911f316 service nova] [instance: 77c60059-535a-47ca-a5b0-45cc1df93b45] Updated VIF entry in instance network info cache for port af7ffe26-3161-4ff9-abf9-eb8480443269. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 742.196197] env[62627]: DEBUG nova.network.neutron [req-8b0d1c59-4a7d-40fc-9be4-28d70fca98a3 req-0fe532d6-d8e0-4d89-93a5-49170911f316 service nova] [instance: 77c60059-535a-47ca-a5b0-45cc1df93b45] Updating instance_info_cache with network_info: [{"id": "af7ffe26-3161-4ff9-abf9-eb8480443269", "address": "fa:16:3e:15:cb:74", "network": {"id": "d75e7b13-f4ab-4626-9799-4970d691f831", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-781733128-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca351451d2344fc1872f6faf89412354", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c5f8c7b-74a0-43f7-87b4-3c38dd26ac3e", "external-id": "nsx-vlan-transportzone-988", "segmentation_id": 988, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf7ffe26-31", "ovs_interfaceid": "af7ffe26-3161-4ff9-abf9-eb8480443269", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 742.224612] env[62627]: DEBUG oslo_vmware.api [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2193979, 'name': PowerOnVM_Task} progress is 1%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.234035] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a9355c4-1011-4727-8c89-fa77495df331 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.245546] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-087f7094-c3d4-4e7d-8161-2bd5b2b80589 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.292714] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8190f234-d07a-486c-9a1d-ad6b30aad84b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.309628] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c2fd919-d068-487b-879d-bf571affbdb3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.314022] env[62627]: DEBUG oslo_vmware.api [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Task: {'id': task-2193980, 'name': Rename_Task} progress is 99%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.327820] env[62627]: DEBUG nova.compute.provider_tree [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 742.349735] env[62627]: DEBUG oslo_concurrency.lockutils [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Acquiring lock "refresh_cache-db8ddb7b-b119-415c-98d9-dc5b3d2868b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 742.351899] env[62627]: DEBUG oslo_concurrency.lockutils [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Acquired lock "refresh_cache-db8ddb7b-b119-415c-98d9-dc5b3d2868b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 742.351899] env[62627]: DEBUG nova.network.neutron [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 742.384055] env[62627]: DEBUG nova.network.neutron [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 33bf3932-3746-445b-b0fe-611466cdd9b1] Successfully created port: ef05a825-d4fa-406d-b515-9d2a22422ddc {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 742.535811] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 742.699729] env[62627]: DEBUG oslo_concurrency.lockutils [req-8b0d1c59-4a7d-40fc-9be4-28d70fca98a3 req-0fe532d6-d8e0-4d89-93a5-49170911f316 service nova] Releasing lock "refresh_cache-77c60059-535a-47ca-a5b0-45cc1df93b45" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 742.725283] env[62627]: DEBUG oslo_vmware.api [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2193979, 'name': PowerOnVM_Task} progress is 64%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.755948] env[62627]: DEBUG nova.compute.manager [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 33bf3932-3746-445b-b0fe-611466cdd9b1] Start spawning the instance on the hypervisor. 
{{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 742.795641] env[62627]: DEBUG nova.virt.hardware [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 742.795955] env[62627]: DEBUG nova.virt.hardware [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 742.796106] env[62627]: DEBUG nova.virt.hardware [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 742.796259] env[62627]: DEBUG nova.virt.hardware [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 742.796431] env[62627]: DEBUG nova.virt.hardware [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 742.796537] env[62627]: DEBUG nova.virt.hardware [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 742.796782] env[62627]: DEBUG nova.virt.hardware [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 742.796947] env[62627]: DEBUG nova.virt.hardware [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 742.797482] env[62627]: DEBUG nova.virt.hardware [None 
req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 742.797482] env[62627]: DEBUG nova.virt.hardware [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 742.797482] env[62627]: DEBUG nova.virt.hardware [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 742.798376] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd171ec0-b5b6-4cd1-a896-5588a778fb04 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.812720] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a1fba4e-bd02-42d3-9b92-03c25e44a3b4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.817277] env[62627]: DEBUG oslo_vmware.api [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Task: {'id': task-2193980, 'name': Rename_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.834744] env[62627]: DEBUG nova.scheduler.client.report [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 742.906365] env[62627]: DEBUG nova.network.neutron [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Instance cache missing network info. 
{{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 743.196040] env[62627]: DEBUG nova.network.neutron [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Updating instance_info_cache with network_info: [{"id": "32dda461-7395-4104-9323-4a39535875de", "address": "fa:16:3e:95:80:24", "network": {"id": "8cfc2882-69b0-4e2d-afd7-6c9f1d02ac6d", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-150676617-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65ae6a3964c44bcbae72d15466f6467e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2321dbbe-f64a-4253-a462-21676f8a278e", "external-id": "nsx-vlan-transportzone-714", "segmentation_id": 714, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32dda461-73", "ovs_interfaceid": "32dda461-7395-4104-9323-4a39535875de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 743.224760] env[62627]: DEBUG oslo_vmware.api [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2193979, 'name': PowerOnVM_Task} progress is 82%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.299998] env[62627]: DEBUG oslo_vmware.api [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Task: {'id': task-2193980, 'name': Rename_Task, 'duration_secs': 1.156959} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.300580] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: 77c60059-535a-47ca-a5b0-45cc1df93b45] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 743.300962] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-76b01aef-5746-41ca-8c75-fd5d4bffee53 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.311108] env[62627]: DEBUG oslo_vmware.api [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Waiting for the task: (returnval){ [ 743.311108] env[62627]: value = "task-2193981" [ 743.311108] env[62627]: _type = "Task" [ 743.311108] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.319844] env[62627]: DEBUG oslo_vmware.api [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Task: {'id': task-2193981, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.341083] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.623s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 743.341589] env[62627]: DEBUG nova.compute.manager [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: e8c87637-4a30-4482-af21-f58ad8a218fc] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 743.344723] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.720s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 743.345909] env[62627]: INFO nova.compute.claims [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 743.701138] env[62627]: DEBUG oslo_concurrency.lockutils [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Releasing lock "refresh_cache-db8ddb7b-b119-415c-98d9-dc5b3d2868b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 743.701138] env[62627]: DEBUG nova.compute.manager [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Instance network_info: |[{"id": "32dda461-7395-4104-9323-4a39535875de", "address": "fa:16:3e:95:80:24", "network": {"id": "8cfc2882-69b0-4e2d-afd7-6c9f1d02ac6d", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-150676617-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65ae6a3964c44bcbae72d15466f6467e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2321dbbe-f64a-4253-a462-21676f8a278e", "external-id": 
"nsx-vlan-transportzone-714", "segmentation_id": 714, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32dda461-73", "ovs_interfaceid": "32dda461-7395-4104-9323-4a39535875de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 743.701636] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:95:80:24', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2321dbbe-f64a-4253-a462-21676f8a278e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '32dda461-7395-4104-9323-4a39535875de', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 743.714726] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Creating folder: Project (65ae6a3964c44bcbae72d15466f6467e). Parent ref: group-v447541. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 743.715219] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-81267137-dc2f-450a-aab4-8e0908843966 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.730019] env[62627]: DEBUG oslo_vmware.api [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2193979, 'name': PowerOnVM_Task, 'duration_secs': 1.610718} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.731791] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: c1fd6afb-9332-446d-a441-5942b60edc94] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 743.732114] env[62627]: INFO nova.compute.manager [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: c1fd6afb-9332-446d-a441-5942b60edc94] Took 13.45 seconds to spawn the instance on the hypervisor. 
[ 743.732297] env[62627]: DEBUG nova.compute.manager [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: c1fd6afb-9332-446d-a441-5942b60edc94] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 743.733541] env[62627]: DEBUG nova.compute.manager [None req-7a54d58d-aed0-44da-9a98-d04b45c932a7 tempest-ServerDiagnosticsV248Test-915452748 tempest-ServerDiagnosticsV248Test-915452748-project-admin] [instance: 1298559d-14fb-422c-8206-a35a747f0c06] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 743.734880] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f06e2d3-2e56-44f1-9cf6-c6dd4922b4b0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.739460] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8f68fbc-19cd-4492-a29e-64fc85b55a4d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.746192] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Created folder: Project (65ae6a3964c44bcbae72d15466f6467e) in parent group-v447541. [ 743.746554] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Creating folder: Instances. Parent ref: group-v447571. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 743.747448] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-878eb2e9-ed4a-455b-a174-181186c73667 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.753024] env[62627]: INFO nova.compute.manager [None req-7a54d58d-aed0-44da-9a98-d04b45c932a7 tempest-ServerDiagnosticsV248Test-915452748 tempest-ServerDiagnosticsV248Test-915452748-project-admin] [instance: 1298559d-14fb-422c-8206-a35a747f0c06] Retrieving diagnostics [ 743.757363] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a10fdd9-bcd3-43bd-bfba-522541d43087 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.801797] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Created folder: Instances in parent group-v447571. [ 743.802825] env[62627]: DEBUG oslo.service.loopingcall [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 743.802825] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 743.802825] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-70271375-acd3-4414-afa2-76bfb046c737 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.830293] env[62627]: DEBUG oslo_vmware.api [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Task: {'id': task-2193981, 'name': PowerOnVM_Task, 'duration_secs': 0.472781} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.831914] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: 77c60059-535a-47ca-a5b0-45cc1df93b45] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 743.832158] env[62627]: INFO nova.compute.manager [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: 77c60059-535a-47ca-a5b0-45cc1df93b45] Took 11.11 seconds to spawn the instance on the hypervisor. [ 743.832425] env[62627]: DEBUG nova.compute.manager [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: 77c60059-535a-47ca-a5b0-45cc1df93b45] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 743.832949] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 743.832949] env[62627]: value = "task-2193984" [ 743.832949] env[62627]: _type = "Task" [ 743.832949] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.833670] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e0e5ff6-0967-476e-9292-30c34d06eb0b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.846762] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2193984, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.853559] env[62627]: DEBUG nova.compute.utils [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 743.856465] env[62627]: DEBUG nova.compute.manager [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: e8c87637-4a30-4482-af21-f58ad8a218fc] Allocating IP information in the background. 
{{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 743.856665] env[62627]: DEBUG nova.network.neutron [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: e8c87637-4a30-4482-af21-f58ad8a218fc] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 743.924485] env[62627]: DEBUG nova.policy [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f9511c993533466faa59ca47d51b2964', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8fe72f9f87144786adbb917cf0da1b19', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 744.270977] env[62627]: INFO nova.compute.manager [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: c1fd6afb-9332-446d-a441-5942b60edc94] Took 26.27 seconds to build instance. [ 744.332582] env[62627]: DEBUG nova.network.neutron [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: e8c87637-4a30-4482-af21-f58ad8a218fc] Successfully created port: a68f6bb7-6d15-4329-aedf-cc72bcc3e57a {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 744.347725] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2193984, 'name': CreateVM_Task} progress is 25%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.362502] env[62627]: DEBUG nova.compute.manager [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: e8c87637-4a30-4482-af21-f58ad8a218fc] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 744.368876] env[62627]: INFO nova.compute.manager [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: 77c60059-535a-47ca-a5b0-45cc1df93b45] Took 25.28 seconds to build instance. [ 744.779373] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3ad0588c-0d6d-4bff-be10-b325220b688a tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Lock "c1fd6afb-9332-446d-a441-5942b60edc94" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.792s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 744.858078] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2193984, 'name': CreateVM_Task, 'duration_secs': 0.696068} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.858282] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 744.858943] env[62627]: DEBUG oslo_concurrency.lockutils [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 744.861030] env[62627]: DEBUG oslo_concurrency.lockutils [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 744.861388] env[62627]: DEBUG oslo_concurrency.lockutils [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 744.861656] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e3414de-fc5c-4b48-bd01-b2c33f7cd833 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.872347] env[62627]: DEBUG oslo_vmware.api [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Waiting for the task: (returnval){ [ 744.872347] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52a9112f-cbe1-c33e-9090-c5d790dc3c26" [ 744.872347] env[62627]: _type = "Task" [ 744.872347] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.874159] env[62627]: DEBUG oslo_concurrency.lockutils [None req-73c4cb24-3d0f-4784-a09d-75ead0c744ec tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Lock "77c60059-535a-47ca-a5b0-45cc1df93b45" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.805s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 744.875049] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cde85bc5-5c47-4393-a43d-79f4e995d792 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.890431] env[62627]: DEBUG oslo_vmware.api [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52a9112f-cbe1-c33e-9090-c5d790dc3c26, 'name': SearchDatastore_Task, 'duration_secs': 0.011704} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.897028] env[62627]: DEBUG oslo_concurrency.lockutils [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 744.897028] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 744.897028] env[62627]: DEBUG oslo_concurrency.lockutils [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 744.897028] env[62627]: DEBUG oslo_concurrency.lockutils [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 744.897277] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 744.897277] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-97803ae2-af94-4f3b-bf97-9abd0855f0b0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.898797] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc80f369-a736-4da5-b8cd-096a952565cd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.940354] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02edd2c2-0045-4f95-bc37-cc4914322a36 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.941686] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 744.941861] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 
tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 744.942580] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9c07215e-9fd7-4257-bed3-cddf10f8bbae {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.948896] env[62627]: DEBUG oslo_vmware.api [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Waiting for the task: (returnval){ [ 744.948896] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52cd2731-ff35-569f-246c-262c5e99f0a8" [ 744.948896] env[62627]: _type = "Task" [ 744.948896] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.959827] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b905c325-23ce-4f7d-be4f-b7caffc40af1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.969891] env[62627]: DEBUG oslo_vmware.api [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52cd2731-ff35-569f-246c-262c5e99f0a8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.978377] env[62627]: DEBUG nova.compute.provider_tree [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 744.981978] env[62627]: DEBUG nova.network.neutron [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 33bf3932-3746-445b-b0fe-611466cdd9b1] Successfully updated port: ef05a825-d4fa-406d-b515-9d2a22422ddc {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 745.283691] env[62627]: DEBUG nova.compute.manager [req-1845c72d-8f3e-4728-aa82-2feef1b0e68c req-d8bdcaad-a5d9-4c27-bebf-d8cfe831ff6e service nova] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Received event network-vif-plugged-32dda461-7395-4104-9323-4a39535875de {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 745.283904] env[62627]: DEBUG oslo_concurrency.lockutils [req-1845c72d-8f3e-4728-aa82-2feef1b0e68c req-d8bdcaad-a5d9-4c27-bebf-d8cfe831ff6e service nova] Acquiring lock "db8ddb7b-b119-415c-98d9-dc5b3d2868b7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 745.284147] env[62627]: DEBUG oslo_concurrency.lockutils [req-1845c72d-8f3e-4728-aa82-2feef1b0e68c req-d8bdcaad-a5d9-4c27-bebf-d8cfe831ff6e service nova] Lock "db8ddb7b-b119-415c-98d9-dc5b3d2868b7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" 
:: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 745.284281] env[62627]: DEBUG oslo_concurrency.lockutils [req-1845c72d-8f3e-4728-aa82-2feef1b0e68c req-d8bdcaad-a5d9-4c27-bebf-d8cfe831ff6e service nova] Lock "db8ddb7b-b119-415c-98d9-dc5b3d2868b7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 745.285376] env[62627]: DEBUG nova.compute.manager [req-1845c72d-8f3e-4728-aa82-2feef1b0e68c req-d8bdcaad-a5d9-4c27-bebf-d8cfe831ff6e service nova] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] No waiting events found dispatching network-vif-plugged-32dda461-7395-4104-9323-4a39535875de {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 745.285376] env[62627]: WARNING nova.compute.manager [req-1845c72d-8f3e-4728-aa82-2feef1b0e68c req-d8bdcaad-a5d9-4c27-bebf-d8cfe831ff6e service nova] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Received unexpected event network-vif-plugged-32dda461-7395-4104-9323-4a39535875de for instance with vm_state building and task_state spawning. [ 745.285376] env[62627]: DEBUG nova.compute.manager [req-1845c72d-8f3e-4728-aa82-2feef1b0e68c req-d8bdcaad-a5d9-4c27-bebf-d8cfe831ff6e service nova] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Received event network-changed-32dda461-7395-4104-9323-4a39535875de {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 745.285376] env[62627]: DEBUG nova.compute.manager [req-1845c72d-8f3e-4728-aa82-2feef1b0e68c req-d8bdcaad-a5d9-4c27-bebf-d8cfe831ff6e service nova] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Refreshing instance network info cache due to event network-changed-32dda461-7395-4104-9323-4a39535875de. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 745.288451] env[62627]: DEBUG oslo_concurrency.lockutils [req-1845c72d-8f3e-4728-aa82-2feef1b0e68c req-d8bdcaad-a5d9-4c27-bebf-d8cfe831ff6e service nova] Acquiring lock "refresh_cache-db8ddb7b-b119-415c-98d9-dc5b3d2868b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 745.288818] env[62627]: DEBUG oslo_concurrency.lockutils [req-1845c72d-8f3e-4728-aa82-2feef1b0e68c req-d8bdcaad-a5d9-4c27-bebf-d8cfe831ff6e service nova] Acquired lock "refresh_cache-db8ddb7b-b119-415c-98d9-dc5b3d2868b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 745.288818] env[62627]: DEBUG nova.network.neutron [req-1845c72d-8f3e-4728-aa82-2feef1b0e68c req-d8bdcaad-a5d9-4c27-bebf-d8cfe831ff6e service nova] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Refreshing network info cache for port 32dda461-7395-4104-9323-4a39535875de {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 745.291851] env[62627]: DEBUG nova.compute.manager [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 15799515-ab7d-4e7c-a6e0-c740baf4f93a] Starting instance... 
{{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 745.375348] env[62627]: DEBUG nova.compute.manager [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: e8c87637-4a30-4482-af21-f58ad8a218fc] Start spawning the instance on the hypervisor. {{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 745.387129] env[62627]: DEBUG nova.compute.manager [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: f09c685d-9103-4c94-a44a-266aacc334f3] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 745.409868] env[62627]: DEBUG nova.virt.hardware [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 745.410161] env[62627]: DEBUG nova.virt.hardware [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 745.410321] env[62627]: DEBUG nova.virt.hardware [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 745.410548] env[62627]: DEBUG nova.virt.hardware [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 745.410640] env[62627]: DEBUG nova.virt.hardware [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 745.410785] env[62627]: DEBUG nova.virt.hardware [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 745.411071] env[62627]: DEBUG 
nova.virt.hardware [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 745.412226] env[62627]: DEBUG nova.virt.hardware [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 745.412496] env[62627]: DEBUG nova.virt.hardware [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 745.412640] env[62627]: DEBUG nova.virt.hardware [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 745.412765] env[62627]: DEBUG nova.virt.hardware [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 745.414029] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a014d64e-3047-430e-b9e6-6afc50fb2806 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.424151] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d27a2c70-107c-4cc2-942a-736d3b2a81c3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.468757] env[62627]: DEBUG oslo_vmware.api [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52cd2731-ff35-569f-246c-262c5e99f0a8, 'name': SearchDatastore_Task, 'duration_secs': 0.020843} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.469923] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-43503317-8dfc-4e76-af6f-8c7cc9ee6084 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.476545] env[62627]: DEBUG oslo_vmware.api [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Waiting for the task: (returnval){ [ 745.476545] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52e369f1-e3b8-f790-b2f0-62f55941d803" [ 745.476545] env[62627]: _type = "Task" [ 745.476545] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.482297] env[62627]: DEBUG nova.scheduler.client.report [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 745.487463] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Acquiring lock "refresh_cache-33bf3932-3746-445b-b0fe-611466cdd9b1" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 745.487817] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Acquired lock "refresh_cache-33bf3932-3746-445b-b0fe-611466cdd9b1" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 745.487817] env[62627]: DEBUG nova.network.neutron [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 33bf3932-3746-445b-b0fe-611466cdd9b1] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 745.495079] env[62627]: DEBUG oslo_vmware.api [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52e369f1-e3b8-f790-b2f0-62f55941d803, 'name': SearchDatastore_Task, 'duration_secs': 0.012185} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.495764] env[62627]: DEBUG oslo_concurrency.lockutils [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 745.496011] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] db8ddb7b-b119-415c-98d9-dc5b3d2868b7/db8ddb7b-b119-415c-98d9-dc5b3d2868b7.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 745.497447] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1d5512d8-f2ce-456f-a878-6a3b1880952c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.506417] env[62627]: DEBUG oslo_vmware.api [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Waiting for the task: (returnval){ [ 745.506417] env[62627]: value = "task-2193985" [ 745.506417] env[62627]: _type = "Task" [ 745.506417] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.517591] env[62627]: DEBUG oslo_vmware.api [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Task: {'id': task-2193985, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.563692] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Acquiring lock "16c4065b-83cf-4b1c-92bf-66ce0a75e573" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 745.563934] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Lock "16c4065b-83cf-4b1c-92bf-66ce0a75e573" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 745.709106] env[62627]: DEBUG oslo_concurrency.lockutils [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquiring lock "a12855e2-1544-4e85-a7ff-c026fec861b6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 745.709916] env[62627]: DEBUG oslo_concurrency.lockutils [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lock "a12855e2-1544-4e85-a7ff-c026fec861b6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 745.829046] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 745.924659] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 745.990406] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.643s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 745.990406] env[62627]: DEBUG nova.compute.manager [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Start building networks asynchronously for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 745.991594] env[62627]: DEBUG oslo_concurrency.lockutils [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.301s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 745.994757] env[62627]: INFO nova.compute.claims [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] [instance: 7691c2e2-4e43-4990-bf4d-2ef3298a4264] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 746.022903] env[62627]: DEBUG oslo_vmware.api [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Task: {'id': task-2193985, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.127305] env[62627]: DEBUG nova.network.neutron [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 33bf3932-3746-445b-b0fe-611466cdd9b1] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 746.184605] env[62627]: DEBUG oslo_concurrency.lockutils [None req-10862f9b-0f1c-4b93-8fab-d832620bc520 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Acquiring lock "1298559d-14fb-422c-8206-a35a747f0c06" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 746.185071] env[62627]: DEBUG oslo_concurrency.lockutils [None req-10862f9b-0f1c-4b93-8fab-d832620bc520 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Lock "1298559d-14fb-422c-8206-a35a747f0c06" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 746.185258] env[62627]: DEBUG oslo_concurrency.lockutils [None req-10862f9b-0f1c-4b93-8fab-d832620bc520 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Acquiring lock "1298559d-14fb-422c-8206-a35a747f0c06-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 746.185373] env[62627]: DEBUG oslo_concurrency.lockutils [None req-10862f9b-0f1c-4b93-8fab-d832620bc520 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Lock "1298559d-14fb-422c-8206-a35a747f0c06-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 746.185616] env[62627]: DEBUG oslo_concurrency.lockutils [None req-10862f9b-0f1c-4b93-8fab-d832620bc520 
tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Lock "1298559d-14fb-422c-8206-a35a747f0c06-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 746.188910] env[62627]: INFO nova.compute.manager [None req-10862f9b-0f1c-4b93-8fab-d832620bc520 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] [instance: 1298559d-14fb-422c-8206-a35a747f0c06] Terminating instance [ 746.501234] env[62627]: DEBUG nova.compute.utils [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 746.502691] env[62627]: DEBUG nova.compute.manager [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Not allocating networking since 'none' was specified. {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 746.519561] env[62627]: DEBUG oslo_vmware.api [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Task: {'id': task-2193985, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.700402} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.520040] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] db8ddb7b-b119-415c-98d9-dc5b3d2868b7/db8ddb7b-b119-415c-98d9-dc5b3d2868b7.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 746.520040] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 746.520277] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5fc2d5b1-f44a-4c42-949c-dcd0913eb3b6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.529641] env[62627]: DEBUG oslo_vmware.api [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Waiting for the task: (returnval){ [ 746.529641] env[62627]: value = "task-2193986" [ 746.529641] env[62627]: _type = "Task" [ 746.529641] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.540250] env[62627]: DEBUG oslo_vmware.api [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Task: {'id': task-2193986, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.694402] env[62627]: DEBUG oslo_concurrency.lockutils [None req-10862f9b-0f1c-4b93-8fab-d832620bc520 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Acquiring lock "refresh_cache-1298559d-14fb-422c-8206-a35a747f0c06" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 746.694649] env[62627]: DEBUG oslo_concurrency.lockutils [None req-10862f9b-0f1c-4b93-8fab-d832620bc520 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Acquired lock "refresh_cache-1298559d-14fb-422c-8206-a35a747f0c06" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 746.694771] env[62627]: DEBUG nova.network.neutron [None req-10862f9b-0f1c-4b93-8fab-d832620bc520 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] [instance: 1298559d-14fb-422c-8206-a35a747f0c06] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 746.956767] env[62627]: DEBUG nova.network.neutron [req-1845c72d-8f3e-4728-aa82-2feef1b0e68c req-d8bdcaad-a5d9-4c27-bebf-d8cfe831ff6e service nova] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Updated VIF entry in instance network info cache for port 32dda461-7395-4104-9323-4a39535875de. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 746.957230] env[62627]: DEBUG nova.network.neutron [req-1845c72d-8f3e-4728-aa82-2feef1b0e68c req-d8bdcaad-a5d9-4c27-bebf-d8cfe831ff6e service nova] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Updating instance_info_cache with network_info: [{"id": "32dda461-7395-4104-9323-4a39535875de", "address": "fa:16:3e:95:80:24", "network": {"id": "8cfc2882-69b0-4e2d-afd7-6c9f1d02ac6d", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-150676617-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65ae6a3964c44bcbae72d15466f6467e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2321dbbe-f64a-4253-a462-21676f8a278e", "external-id": "nsx-vlan-transportzone-714", "segmentation_id": 714, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32dda461-73", "ovs_interfaceid": "32dda461-7395-4104-9323-4a39535875de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 747.000838] env[62627]: DEBUG nova.network.neutron [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 33bf3932-3746-445b-b0fe-611466cdd9b1] Updating instance_info_cache with network_info: [{"id": "ef05a825-d4fa-406d-b515-9d2a22422ddc", "address": "fa:16:3e:fd:a2:d2", "network": {"id": "529dc9a6-8843-4139-a315-ff9ae56f4d81", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1514488580-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8fe72f9f87144786adbb917cf0da1b19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1eed7865-f9d8-463e-843f-3b0b3a962a2c", "external-id": "nsx-vlan-transportzone-852", "segmentation_id": 852, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef05a825-d4", "ovs_interfaceid": "ef05a825-d4fa-406d-b515-9d2a22422ddc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 747.004417] env[62627]: DEBUG nova.compute.manager [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Start building block device mappings for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 747.043340] env[62627]: DEBUG oslo_vmware.api [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Task: {'id': task-2193986, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.325909} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.046335] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 747.047498] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a74fd69-4867-4f11-8555-95986514f044 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.071942] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Reconfiguring VM instance instance-0000000a to attach disk [datastore2] db8ddb7b-b119-415c-98d9-dc5b3d2868b7/db8ddb7b-b119-415c-98d9-dc5b3d2868b7.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 747.075492] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5efe3c57-4af1-48b0-b5f6-19f895750cf3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.099297] env[62627]: DEBUG oslo_vmware.api [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Waiting for the task: (returnval){ [ 747.099297] env[62627]: value = "task-2193987" [ 747.099297] env[62627]: _type = "Task" [ 747.099297] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.112288] env[62627]: DEBUG oslo_vmware.api [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Task: {'id': task-2193987, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.275491] env[62627]: DEBUG nova.network.neutron [None req-10862f9b-0f1c-4b93-8fab-d832620bc520 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] [instance: 1298559d-14fb-422c-8206-a35a747f0c06] Instance cache missing network info. 
{{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 747.461412] env[62627]: DEBUG oslo_concurrency.lockutils [req-1845c72d-8f3e-4728-aa82-2feef1b0e68c req-d8bdcaad-a5d9-4c27-bebf-d8cfe831ff6e service nova] Releasing lock "refresh_cache-db8ddb7b-b119-415c-98d9-dc5b3d2868b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 747.496570] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9138243d-5413-40dd-b15a-f37c502a2688 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.506128] env[62627]: DEBUG nova.network.neutron [None req-10862f9b-0f1c-4b93-8fab-d832620bc520 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] [instance: 1298559d-14fb-422c-8206-a35a747f0c06] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 747.506128] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Releasing lock "refresh_cache-33bf3932-3746-445b-b0fe-611466cdd9b1" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 747.506226] env[62627]: DEBUG nova.compute.manager [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 33bf3932-3746-445b-b0fe-611466cdd9b1] Instance network_info: |[{"id": "ef05a825-d4fa-406d-b515-9d2a22422ddc", "address": "fa:16:3e:fd:a2:d2", "network": {"id": "529dc9a6-8843-4139-a315-ff9ae56f4d81", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1514488580-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8fe72f9f87144786adbb917cf0da1b19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1eed7865-f9d8-463e-843f-3b0b3a962a2c", "external-id": "nsx-vlan-transportzone-852", "segmentation_id": 852, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef05a825-d4", "ovs_interfaceid": "ef05a825-d4fa-406d-b515-9d2a22422ddc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 747.508053] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 33bf3932-3746-445b-b0fe-611466cdd9b1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fd:a2:d2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1eed7865-f9d8-463e-843f-3b0b3a962a2c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ef05a825-d4fa-406d-b515-9d2a22422ddc', 
'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 747.516582] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Creating folder: Project (8fe72f9f87144786adbb917cf0da1b19). Parent ref: group-v447541. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 747.523209] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6cdc3731-9060-4688-9d86-585b493d63fb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.525927] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c0159bf-7b9e-40e1-af7e-7d66b5d11267 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.564421] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57b1311b-9baf-4205-9274-c9fa51948c9b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.567188] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Created folder: Project (8fe72f9f87144786adbb917cf0da1b19) in parent group-v447541. [ 747.567369] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Creating folder: Instances. Parent ref: group-v447574. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 747.567732] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4e8ebb61-a7e8-4e0c-8834-63415416af9d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.576494] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec0514bb-6163-4af1-a695-f53e5737e21b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.581731] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Created folder: Instances in parent group-v447574. [ 747.582388] env[62627]: DEBUG oslo.service.loopingcall [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 747.582999] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 33bf3932-3746-445b-b0fe-611466cdd9b1] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 747.582999] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-964c685d-95b3-47c8-ae7e-86f3e2750fa1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.598480] env[62627]: DEBUG nova.network.neutron [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: e8c87637-4a30-4482-af21-f58ad8a218fc] Successfully updated port: a68f6bb7-6d15-4329-aedf-cc72bcc3e57a {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 747.608428] env[62627]: DEBUG nova.compute.provider_tree [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 747.616911] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 747.616911] env[62627]: value = "task-2193990" [ 747.616911] env[62627]: _type = "Task" [ 747.616911] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.621590] env[62627]: DEBUG oslo_vmware.api [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Task: {'id': task-2193987, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.631773] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2193990, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.011609] env[62627]: DEBUG oslo_concurrency.lockutils [None req-10862f9b-0f1c-4b93-8fab-d832620bc520 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Releasing lock "refresh_cache-1298559d-14fb-422c-8206-a35a747f0c06" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 748.016072] env[62627]: DEBUG nova.compute.manager [None req-10862f9b-0f1c-4b93-8fab-d832620bc520 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] [instance: 1298559d-14fb-422c-8206-a35a747f0c06] Start destroying the instance on the hypervisor. 
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 748.016072] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-10862f9b-0f1c-4b93-8fab-d832620bc520 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] [instance: 1298559d-14fb-422c-8206-a35a747f0c06] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 748.016072] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9964a264-3eca-4420-a61d-065b42d4e297 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.030254] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-10862f9b-0f1c-4b93-8fab-d832620bc520 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] [instance: 1298559d-14fb-422c-8206-a35a747f0c06] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 748.030612] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d519d6da-0303-438d-8a8d-6862e601d5ba {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.033471] env[62627]: DEBUG nova.compute.manager [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Start spawning the instance on the hypervisor. {{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 748.043071] env[62627]: DEBUG oslo_vmware.api [None req-10862f9b-0f1c-4b93-8fab-d832620bc520 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Waiting for the task: (returnval){ [ 748.043071] env[62627]: value = "task-2193991" [ 748.043071] env[62627]: _type = "Task" [ 748.043071] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.056615] env[62627]: DEBUG oslo_vmware.api [None req-10862f9b-0f1c-4b93-8fab-d832620bc520 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Task: {'id': task-2193991, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.079225] env[62627]: DEBUG nova.virt.hardware [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 748.079462] env[62627]: DEBUG nova.virt.hardware [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 748.079610] env[62627]: DEBUG nova.virt.hardware [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 748.079787] env[62627]: DEBUG nova.virt.hardware [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 748.084344] env[62627]: DEBUG nova.virt.hardware [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 748.084610] env[62627]: DEBUG nova.virt.hardware [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 748.084911] env[62627]: DEBUG nova.virt.hardware [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 748.085364] env[62627]: DEBUG nova.virt.hardware [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 748.085619] env[62627]: DEBUG nova.virt.hardware [None 
req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 748.085792] env[62627]: DEBUG nova.virt.hardware [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 748.085963] env[62627]: DEBUG nova.virt.hardware [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 748.086836] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f4ee445-47d7-429c-a8c4-8f9b2a083621 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.096394] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84a3d309-c4bc-49f4-b948-611b7f94e8fc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.113940] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Acquiring lock "refresh_cache-e8c87637-4a30-4482-af21-f58ad8a218fc" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 748.114142] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Acquired lock "refresh_cache-e8c87637-4a30-4482-af21-f58ad8a218fc" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 748.114293] env[62627]: DEBUG nova.network.neutron [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: e8c87637-4a30-4482-af21-f58ad8a218fc] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 748.116279] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Instance VIF info [] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 748.121920] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Creating folder: Project (405af76d61ec4b6eb87f2f9a5b0b65d4). Parent ref: group-v447541. 
{{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 748.126073] env[62627]: DEBUG nova.scheduler.client.report [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 748.130146] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9f712eb8-1ee5-41b0-a1eb-cbf08b095c85 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.149364] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2193990, 'name': CreateVM_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.151439] env[62627]: DEBUG oslo_vmware.api [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Task: {'id': task-2193987, 'name': ReconfigVM_Task, 'duration_secs': 0.815677} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.151439] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Reconfigured VM instance instance-0000000a to attach disk [datastore2] db8ddb7b-b119-415c-98d9-dc5b3d2868b7/db8ddb7b-b119-415c-98d9-dc5b3d2868b7.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 748.152036] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Created folder: Project (405af76d61ec4b6eb87f2f9a5b0b65d4) in parent group-v447541. [ 748.152214] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Creating folder: Instances. Parent ref: group-v447577. 
{{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 748.152458] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e98d37f1-ea4c-4ae5-bcf9-8e384db6e0ca {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.154183] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6b5c3c3c-8063-4630-9a55-2917bfbe14fc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.164359] env[62627]: DEBUG oslo_vmware.api [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Waiting for the task: (returnval){ [ 748.164359] env[62627]: value = "task-2193994" [ 748.164359] env[62627]: _type = "Task" [ 748.164359] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.176042] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Created folder: Instances in parent group-v447577. [ 748.176042] env[62627]: DEBUG oslo.service.loopingcall [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 748.176042] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 748.176042] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f8d3f4cc-6211-4d2d-8d08-7b660ab1f9aa {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.194073] env[62627]: DEBUG oslo_vmware.api [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Task: {'id': task-2193994, 'name': Rename_Task} progress is 6%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.201722] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 748.201722] env[62627]: value = "task-2193995" [ 748.201722] env[62627]: _type = "Task" [ 748.201722] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.211773] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2193995, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.510572] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Acquiring lock "b1ba58c1-340f-486a-8f47-86fbc7b27485" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 748.511149] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Lock "b1ba58c1-340f-486a-8f47-86fbc7b27485" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 748.559416] env[62627]: DEBUG oslo_vmware.api [None req-10862f9b-0f1c-4b93-8fab-d832620bc520 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Task: {'id': task-2193991, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.572455] env[62627]: DEBUG nova.compute.manager [req-6c4637d4-b105-4710-a278-1053c60b653a req-2a46fdc2-cff9-495c-b6e7-a71dafd565fd service nova] [instance: 33bf3932-3746-445b-b0fe-611466cdd9b1] Received event network-vif-plugged-ef05a825-d4fa-406d-b515-9d2a22422ddc {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 748.573235] env[62627]: DEBUG oslo_concurrency.lockutils [req-6c4637d4-b105-4710-a278-1053c60b653a req-2a46fdc2-cff9-495c-b6e7-a71dafd565fd service nova] Acquiring lock "33bf3932-3746-445b-b0fe-611466cdd9b1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 748.573235] env[62627]: DEBUG oslo_concurrency.lockutils [req-6c4637d4-b105-4710-a278-1053c60b653a req-2a46fdc2-cff9-495c-b6e7-a71dafd565fd service nova] Lock "33bf3932-3746-445b-b0fe-611466cdd9b1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 748.573235] env[62627]: DEBUG oslo_concurrency.lockutils [req-6c4637d4-b105-4710-a278-1053c60b653a req-2a46fdc2-cff9-495c-b6e7-a71dafd565fd service nova] Lock "33bf3932-3746-445b-b0fe-611466cdd9b1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 748.573358] env[62627]: DEBUG nova.compute.manager [req-6c4637d4-b105-4710-a278-1053c60b653a req-2a46fdc2-cff9-495c-b6e7-a71dafd565fd service nova] [instance: 33bf3932-3746-445b-b0fe-611466cdd9b1] No waiting events found dispatching network-vif-plugged-ef05a825-d4fa-406d-b515-9d2a22422ddc {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 748.573471] env[62627]: WARNING nova.compute.manager [req-6c4637d4-b105-4710-a278-1053c60b653a req-2a46fdc2-cff9-495c-b6e7-a71dafd565fd service nova] [instance: 33bf3932-3746-445b-b0fe-611466cdd9b1] 
Received unexpected event network-vif-plugged-ef05a825-d4fa-406d-b515-9d2a22422ddc for instance with vm_state building and task_state spawning. [ 748.573621] env[62627]: DEBUG nova.compute.manager [req-6c4637d4-b105-4710-a278-1053c60b653a req-2a46fdc2-cff9-495c-b6e7-a71dafd565fd service nova] [instance: 33bf3932-3746-445b-b0fe-611466cdd9b1] Received event network-changed-ef05a825-d4fa-406d-b515-9d2a22422ddc {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 748.573731] env[62627]: DEBUG nova.compute.manager [req-6c4637d4-b105-4710-a278-1053c60b653a req-2a46fdc2-cff9-495c-b6e7-a71dafd565fd service nova] [instance: 33bf3932-3746-445b-b0fe-611466cdd9b1] Refreshing instance network info cache due to event network-changed-ef05a825-d4fa-406d-b515-9d2a22422ddc. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 748.574146] env[62627]: DEBUG oslo_concurrency.lockutils [req-6c4637d4-b105-4710-a278-1053c60b653a req-2a46fdc2-cff9-495c-b6e7-a71dafd565fd service nova] Acquiring lock "refresh_cache-33bf3932-3746-445b-b0fe-611466cdd9b1" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 748.574146] env[62627]: DEBUG oslo_concurrency.lockutils [req-6c4637d4-b105-4710-a278-1053c60b653a req-2a46fdc2-cff9-495c-b6e7-a71dafd565fd service nova] Acquired lock "refresh_cache-33bf3932-3746-445b-b0fe-611466cdd9b1" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 748.574302] env[62627]: DEBUG nova.network.neutron [req-6c4637d4-b105-4710-a278-1053c60b653a req-2a46fdc2-cff9-495c-b6e7-a71dafd565fd service nova] [instance: 33bf3932-3746-445b-b0fe-611466cdd9b1] Refreshing network info cache for port ef05a825-d4fa-406d-b515-9d2a22422ddc {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 748.635901] env[62627]: DEBUG oslo_concurrency.lockutils [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.644s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 748.636291] env[62627]: DEBUG nova.compute.manager [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] [instance: 7691c2e2-4e43-4990-bf4d-2ef3298a4264] Start building networks asynchronously for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 748.640360] env[62627]: DEBUG oslo_concurrency.lockutils [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.762s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 748.641285] env[62627]: INFO nova.compute.claims [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 748.652619] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2193990, 'name': CreateVM_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.680427] env[62627]: DEBUG oslo_vmware.api [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Task: {'id': task-2193994, 'name': Rename_Task, 'duration_secs': 0.230073} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.680741] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 748.681032] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4258926c-d7eb-4a9e-a654-a11132c13baa {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.694805] env[62627]: DEBUG oslo_vmware.api [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Waiting for the task: (returnval){ [ 748.694805] env[62627]: value = "task-2193996" [ 748.694805] env[62627]: _type = "Task" [ 748.694805] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.712928] env[62627]: DEBUG oslo_vmware.api [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Task: {'id': task-2193996, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.714699] env[62627]: DEBUG nova.network.neutron [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: e8c87637-4a30-4482-af21-f58ad8a218fc] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 748.720632] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2193995, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.055617] env[62627]: DEBUG oslo_vmware.api [None req-10862f9b-0f1c-4b93-8fab-d832620bc520 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Task: {'id': task-2193991, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.145226] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2193990, 'name': CreateVM_Task, 'duration_secs': 1.397672} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.146504] env[62627]: DEBUG nova.compute.utils [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 749.156020] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 33bf3932-3746-445b-b0fe-611466cdd9b1] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 749.156020] env[62627]: DEBUG nova.compute.manager [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] [instance: 7691c2e2-4e43-4990-bf4d-2ef3298a4264] Allocating IP information in the background. {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 749.156020] env[62627]: DEBUG nova.network.neutron [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] [instance: 7691c2e2-4e43-4990-bf4d-2ef3298a4264] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 749.157957] env[62627]: DEBUG oslo_vmware.service [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3409e558-9928-4eea-bedb-7d248ee5cb6a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.170175] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 749.170366] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.170797] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 749.171848] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-20bcbb70-1d51-4ea6-a54a-524bc4a727e3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.183560] env[62627]: DEBUG oslo_vmware.api [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Waiting for the task: (returnval){ [ 749.183560] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52c7099f-811c-5ef5-9af0-747372061b8e" [ 749.183560] env[62627]: _type = "Task" [ 749.183560] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.194425] env[62627]: DEBUG oslo_vmware.api [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52c7099f-811c-5ef5-9af0-747372061b8e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.205450] env[62627]: DEBUG oslo_vmware.api [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Task: {'id': task-2193996, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.214491] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2193995, 'name': CreateVM_Task, 'duration_secs': 0.688375} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.214664] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 749.215041] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 749.549229] env[62627]: DEBUG nova.network.neutron [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: e8c87637-4a30-4482-af21-f58ad8a218fc] Updating instance_info_cache with network_info: [{"id": "a68f6bb7-6d15-4329-aedf-cc72bcc3e57a", "address": "fa:16:3e:d9:a4:9c", "network": {"id": "529dc9a6-8843-4139-a315-ff9ae56f4d81", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1514488580-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8fe72f9f87144786adbb917cf0da1b19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1eed7865-f9d8-463e-843f-3b0b3a962a2c", "external-id": "nsx-vlan-transportzone-852", "segmentation_id": 852, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa68f6bb7-6d", "ovs_interfaceid": "a68f6bb7-6d15-4329-aedf-cc72bcc3e57a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 749.564132] env[62627]: DEBUG oslo_vmware.api [None req-10862f9b-0f1c-4b93-8fab-d832620bc520 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Task: {'id': task-2193991, 'name': PowerOffVM_Task, 'duration_secs': 1.132357} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.564132] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-10862f9b-0f1c-4b93-8fab-d832620bc520 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] [instance: 1298559d-14fb-422c-8206-a35a747f0c06] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 749.564280] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-10862f9b-0f1c-4b93-8fab-d832620bc520 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] [instance: 1298559d-14fb-422c-8206-a35a747f0c06] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 749.564464] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8f1db619-ca59-455c-96a1-c9226c0f672b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.569379] env[62627]: DEBUG nova.compute.manager [req-5e91c62b-c61b-43d4-95ff-7d45cb0113bf req-4da80b38-28d4-49f0-a535-14a51b6593af service nova] [instance: 77c60059-535a-47ca-a5b0-45cc1df93b45] Received event network-changed-af7ffe26-3161-4ff9-abf9-eb8480443269 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 749.569379] env[62627]: DEBUG nova.compute.manager [req-5e91c62b-c61b-43d4-95ff-7d45cb0113bf req-4da80b38-28d4-49f0-a535-14a51b6593af service nova] [instance: 77c60059-535a-47ca-a5b0-45cc1df93b45] Refreshing instance network info cache due to event network-changed-af7ffe26-3161-4ff9-abf9-eb8480443269. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 749.569379] env[62627]: DEBUG oslo_concurrency.lockutils [req-5e91c62b-c61b-43d4-95ff-7d45cb0113bf req-4da80b38-28d4-49f0-a535-14a51b6593af service nova] Acquiring lock "refresh_cache-77c60059-535a-47ca-a5b0-45cc1df93b45" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 749.569379] env[62627]: DEBUG oslo_concurrency.lockutils [req-5e91c62b-c61b-43d4-95ff-7d45cb0113bf req-4da80b38-28d4-49f0-a535-14a51b6593af service nova] Acquired lock "refresh_cache-77c60059-535a-47ca-a5b0-45cc1df93b45" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.569379] env[62627]: DEBUG nova.network.neutron [req-5e91c62b-c61b-43d4-95ff-7d45cb0113bf req-4da80b38-28d4-49f0-a535-14a51b6593af service nova] [instance: 77c60059-535a-47ca-a5b0-45cc1df93b45] Refreshing network info cache for port af7ffe26-3161-4ff9-abf9-eb8480443269 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 749.579265] env[62627]: DEBUG nova.policy [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5ced131d44564de590ed974aa65a2e24', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9bd77b83b3af479d9713f405098112ac', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize 
/opt/stack/nova/nova/policy.py:192}} [ 749.601577] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-10862f9b-0f1c-4b93-8fab-d832620bc520 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] [instance: 1298559d-14fb-422c-8206-a35a747f0c06] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 749.601577] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-10862f9b-0f1c-4b93-8fab-d832620bc520 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] [instance: 1298559d-14fb-422c-8206-a35a747f0c06] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 749.601747] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-10862f9b-0f1c-4b93-8fab-d832620bc520 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Deleting the datastore file [datastore2] 1298559d-14fb-422c-8206-a35a747f0c06 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 749.605912] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-72e21e6c-6488-4de4-aa13-27ca5f2250d8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.618502] env[62627]: DEBUG oslo_vmware.api [None req-10862f9b-0f1c-4b93-8fab-d832620bc520 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Waiting for the task: (returnval){ [ 749.618502] env[62627]: value = "task-2193998" [ 749.618502] env[62627]: _type = "Task" [ 749.618502] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.631698] env[62627]: DEBUG oslo_vmware.api [None req-10862f9b-0f1c-4b93-8fab-d832620bc520 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Task: {'id': task-2193998, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.660590] env[62627]: DEBUG nova.compute.manager [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] [instance: 7691c2e2-4e43-4990-bf4d-2ef3298a4264] Start building block device mappings for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 749.712183] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 749.712183] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 33bf3932-3746-445b-b0fe-611466cdd9b1] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 749.712183] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 749.712183] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.712342] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 749.712342] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.712405] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 749.712980] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a7fde37d-6581-4981-b445-7985488a498e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.724786] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-42595c92-0aa8-4215-b127-6b424a8ec3d3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.727638] env[62627]: DEBUG oslo_vmware.api [None req-74cb0fea-2229-4922-899f-dc4c69d8107b 
tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Task: {'id': task-2193996, 'name': PowerOnVM_Task, 'duration_secs': 0.9516} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.729158] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 749.729158] env[62627]: INFO nova.compute.manager [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Took 9.55 seconds to spawn the instance on the hypervisor. [ 749.729374] env[62627]: DEBUG nova.compute.manager [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 749.730931] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d341232a-b27b-4d96-913a-f80466a2f95b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.735475] env[62627]: DEBUG oslo_vmware.api [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Waiting for the task: (returnval){ [ 749.735475] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]521ed78c-7e7d-177b-eddd-83eae2cb695a" [ 749.735475] env[62627]: _type = "Task" [ 749.735475] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.753077] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 749.753313] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 749.759288] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3b3bf41-69ad-4d72-8396-e79b410762fa {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.769730] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 749.769730] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 749.769730] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 749.777534] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-10642811-8f1d-4e7c-827c-f91949a0ddfe {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.784089] env[62627]: DEBUG oslo_vmware.api [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Waiting for the task: (returnval){ [ 749.784089] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5205717f-50da-57d5-6dc3-b4e861339754" [ 749.784089] env[62627]: _type = "Task" [ 749.784089] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.794712] env[62627]: DEBUG oslo_vmware.api [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5205717f-50da-57d5-6dc3-b4e861339754, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.064504] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Releasing lock "refresh_cache-e8c87637-4a30-4482-af21-f58ad8a218fc" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 750.066055] env[62627]: DEBUG nova.compute.manager [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: e8c87637-4a30-4482-af21-f58ad8a218fc] Instance network_info: |[{"id": "a68f6bb7-6d15-4329-aedf-cc72bcc3e57a", "address": "fa:16:3e:d9:a4:9c", "network": {"id": "529dc9a6-8843-4139-a315-ff9ae56f4d81", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1514488580-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8fe72f9f87144786adbb917cf0da1b19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1eed7865-f9d8-463e-843f-3b0b3a962a2c", "external-id": "nsx-vlan-transportzone-852", "segmentation_id": 852, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa68f6bb7-6d", "ovs_interfaceid": "a68f6bb7-6d15-4329-aedf-cc72bcc3e57a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 750.066180] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: e8c87637-4a30-4482-af21-f58ad8a218fc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d9:a4:9c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1eed7865-f9d8-463e-843f-3b0b3a962a2c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a68f6bb7-6d15-4329-aedf-cc72bcc3e57a', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 750.076985] env[62627]: DEBUG oslo.service.loopingcall [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 750.081586] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e8c87637-4a30-4482-af21-f58ad8a218fc] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 750.082049] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c3738dc7-788c-473d-9c3e-6daf05cd040f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.109935] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 750.109935] env[62627]: value = "task-2193999" [ 750.109935] env[62627]: _type = "Task" [ 750.109935] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.118904] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2193999, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.134477] env[62627]: DEBUG oslo_vmware.api [None req-10862f9b-0f1c-4b93-8fab-d832620bc520 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Task: {'id': task-2193998, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.206691} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.134851] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-10862f9b-0f1c-4b93-8fab-d832620bc520 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 750.134851] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-10862f9b-0f1c-4b93-8fab-d832620bc520 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] [instance: 1298559d-14fb-422c-8206-a35a747f0c06] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 750.135021] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-10862f9b-0f1c-4b93-8fab-d832620bc520 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] [instance: 1298559d-14fb-422c-8206-a35a747f0c06] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 750.135239] env[62627]: INFO nova.compute.manager [None req-10862f9b-0f1c-4b93-8fab-d832620bc520 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] [instance: 1298559d-14fb-422c-8206-a35a747f0c06] Took 2.12 seconds to destroy the instance on the hypervisor. [ 750.135471] env[62627]: DEBUG oslo.service.loopingcall [None req-10862f9b-0f1c-4b93-8fab-d832620bc520 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 750.135623] env[62627]: DEBUG nova.compute.manager [-] [instance: 1298559d-14fb-422c-8206-a35a747f0c06] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 750.135749] env[62627]: DEBUG nova.network.neutron [-] [instance: 1298559d-14fb-422c-8206-a35a747f0c06] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 750.189822] env[62627]: DEBUG nova.network.neutron [req-6c4637d4-b105-4710-a278-1053c60b653a req-2a46fdc2-cff9-495c-b6e7-a71dafd565fd service nova] [instance: 33bf3932-3746-445b-b0fe-611466cdd9b1] Updated VIF entry in instance network info cache for port ef05a825-d4fa-406d-b515-9d2a22422ddc. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 750.189822] env[62627]: DEBUG nova.network.neutron [req-6c4637d4-b105-4710-a278-1053c60b653a req-2a46fdc2-cff9-495c-b6e7-a71dafd565fd service nova] [instance: 33bf3932-3746-445b-b0fe-611466cdd9b1] Updating instance_info_cache with network_info: [{"id": "ef05a825-d4fa-406d-b515-9d2a22422ddc", "address": "fa:16:3e:fd:a2:d2", "network": {"id": "529dc9a6-8843-4139-a315-ff9ae56f4d81", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1514488580-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8fe72f9f87144786adbb917cf0da1b19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1eed7865-f9d8-463e-843f-3b0b3a962a2c", "external-id": "nsx-vlan-transportzone-852", "segmentation_id": 852, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef05a825-d4", "ovs_interfaceid": "ef05a825-d4fa-406d-b515-9d2a22422ddc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 750.194785] env[62627]: DEBUG nova.network.neutron [-] [instance: 1298559d-14fb-422c-8206-a35a747f0c06] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 750.253917] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33af253c-a03f-49e6-b928-7ec81aa8e9f2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.269062] env[62627]: INFO nova.compute.manager [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Took 23.76 seconds to build instance. 
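The repeated "Task: {'id': task-..., 'name': CreateVM_Task} progress is N%" entries above come from oslo.vmware's task-polling loop. A minimal sketch of that pattern is given below, assuming an already-reachable vCenter; the endpoint, credentials, and the power-on example are placeholders rather than values taken from this log.

from oslo_vmware import api as vmware_api

def power_on_and_wait(session, vm_ref):
    # Invoke the asynchronous vSphere method; the return value is a Task managed
    # object reference that vCenter updates as the operation progresses.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # wait_for_task() polls the task info on the configured interval (producing
    # the periodic "progress is N%" log lines) until the task reaches 'success',
    # and raises if the task ends in 'error'.
    return session.wait_for_task(task)

# Placeholder session setup; constructing the session opens a connection to the
# vCenter endpoint, so host and credentials here are illustrative only.
session = vmware_api.VMwareAPISession(
    'vcenter.example.org', 'admin', 'secret',
    api_retry_count=10, task_poll_interval=0.5)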
[ 750.270763] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f92ab6d2-40cf-4fbc-806c-0b4044d48579 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.309081] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b550e7a7-3680-4bcf-bcea-241bcd5f5216 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.321342] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 33bf3932-3746-445b-b0fe-611466cdd9b1] Preparing fetch location {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 750.321589] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Creating directory with path [datastore1] vmware_temp/7a8ba68d-6f9a-44d6-a0e4-19cde6375c34/d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 750.322943] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-468d47b8-e484-48f7-9c80-decaea71218f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.327171] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ee1394e1-be17-4424-8e72-cb3329a19e47 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.339902] env[62627]: DEBUG nova.compute.provider_tree [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 750.355027] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Created directory with path [datastore1] vmware_temp/7a8ba68d-6f9a-44d6-a0e4-19cde6375c34/d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 750.355140] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 33bf3932-3746-445b-b0fe-611466cdd9b1] Fetch image to [datastore1] vmware_temp/7a8ba68d-6f9a-44d6-a0e4-19cde6375c34/d5497cf1-16e8-478a-a834-e9289b1751b7/tmp-sparse.vmdk {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 750.355313] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 33bf3932-3746-445b-b0fe-611466cdd9b1] Downloading image file data d5497cf1-16e8-478a-a834-e9289b1751b7 to [datastore1] 
vmware_temp/7a8ba68d-6f9a-44d6-a0e4-19cde6375c34/d5497cf1-16e8-478a-a834-e9289b1751b7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62627) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 750.356101] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc041bfb-07e6-4e5a-a97d-65fc4a4ee8c4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.365245] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee69ad99-9274-41f1-9b56-01ed8a4118ed {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.376523] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79832f30-5842-46a8-80c6-62438d052cfd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.413162] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d621438-05bf-4cd7-a1dd-56229b8f0ff3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.420362] env[62627]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-408a24a5-c08f-4070-b0de-ede9e25499b5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.444246] env[62627]: DEBUG nova.virt.vmwareapi.images [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 33bf3932-3746-445b-b0fe-611466cdd9b1] Downloading image file data d5497cf1-16e8-478a-a834-e9289b1751b7 to the data store datastore1 {{(pid=62627) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 750.514630] env[62627]: DEBUG oslo_vmware.rw_handles [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7a8ba68d-6f9a-44d6-a0e4-19cde6375c34/d5497cf1-16e8-478a-a834-e9289b1751b7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62627) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 750.627018] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2193999, 'name': CreateVM_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.674028] env[62627]: DEBUG nova.compute.manager [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] [instance: 7691c2e2-4e43-4990-bf4d-2ef3298a4264] Start spawning the instance on the hypervisor. 
{{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 750.692973] env[62627]: DEBUG oslo_concurrency.lockutils [req-6c4637d4-b105-4710-a278-1053c60b653a req-2a46fdc2-cff9-495c-b6e7-a71dafd565fd service nova] Releasing lock "refresh_cache-33bf3932-3746-445b-b0fe-611466cdd9b1" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 750.693365] env[62627]: DEBUG nova.compute.manager [req-6c4637d4-b105-4710-a278-1053c60b653a req-2a46fdc2-cff9-495c-b6e7-a71dafd565fd service nova] [instance: e8c87637-4a30-4482-af21-f58ad8a218fc] Received event network-vif-plugged-a68f6bb7-6d15-4329-aedf-cc72bcc3e57a {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 750.693608] env[62627]: DEBUG oslo_concurrency.lockutils [req-6c4637d4-b105-4710-a278-1053c60b653a req-2a46fdc2-cff9-495c-b6e7-a71dafd565fd service nova] Acquiring lock "e8c87637-4a30-4482-af21-f58ad8a218fc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 750.693798] env[62627]: DEBUG oslo_concurrency.lockutils [req-6c4637d4-b105-4710-a278-1053c60b653a req-2a46fdc2-cff9-495c-b6e7-a71dafd565fd service nova] Lock "e8c87637-4a30-4482-af21-f58ad8a218fc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 750.694612] env[62627]: DEBUG oslo_concurrency.lockutils [req-6c4637d4-b105-4710-a278-1053c60b653a req-2a46fdc2-cff9-495c-b6e7-a71dafd565fd service nova] Lock "e8c87637-4a30-4482-af21-f58ad8a218fc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 750.694612] env[62627]: DEBUG nova.compute.manager [req-6c4637d4-b105-4710-a278-1053c60b653a req-2a46fdc2-cff9-495c-b6e7-a71dafd565fd service nova] [instance: e8c87637-4a30-4482-af21-f58ad8a218fc] No waiting events found dispatching network-vif-plugged-a68f6bb7-6d15-4329-aedf-cc72bcc3e57a {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 750.694612] env[62627]: WARNING nova.compute.manager [req-6c4637d4-b105-4710-a278-1053c60b653a req-2a46fdc2-cff9-495c-b6e7-a71dafd565fd service nova] [instance: e8c87637-4a30-4482-af21-f58ad8a218fc] Received unexpected event network-vif-plugged-a68f6bb7-6d15-4329-aedf-cc72bcc3e57a for instance with vm_state building and task_state spawning. 
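The "<uuid>-events" lock lines and the "No waiting events found dispatching ... / Received unexpected event ..." messages above reflect how the compute manager matches Neutron vif-plugged notifications against registered waiters. A simplified, hypothetical sketch of that bookkeeping follows; pending_events and pop_instance_event are illustrative names rather than Nova's actual internals, and only the oslo_concurrency named-lock usage mirrors what the log shows.

from oslo_concurrency import lockutils

# Illustrative per-instance event table: {instance_uuid: {event_name: waiter}}.
pending_events = {}

def pop_instance_event(instance_uuid, event_name):
    # The same "<uuid>-events" named lock seen in the log serializes readers and
    # writers of the per-instance event table, so an incoming vif-plugged event is
    # either matched to a waiter or reported as unexpected (None here).
    with lockutils.lock(instance_uuid + '-events'):
        waiters = pending_events.get(instance_uuid, {})
        return waiters.pop(event_name, None)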
[ 750.699202] env[62627]: DEBUG nova.network.neutron [-] [instance: 1298559d-14fb-422c-8206-a35a747f0c06] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 750.707261] env[62627]: DEBUG nova.virt.hardware [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 750.708090] env[62627]: DEBUG nova.virt.hardware [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 750.708090] env[62627]: DEBUG nova.virt.hardware [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 750.708090] env[62627]: DEBUG nova.virt.hardware [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 750.708090] env[62627]: DEBUG nova.virt.hardware [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 750.708268] env[62627]: DEBUG nova.virt.hardware [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 750.709351] env[62627]: DEBUG nova.virt.hardware [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 750.709351] env[62627]: DEBUG nova.virt.hardware [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 750.709351] 
env[62627]: DEBUG nova.virt.hardware [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 750.709351] env[62627]: DEBUG nova.virt.hardware [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 750.709351] env[62627]: DEBUG nova.virt.hardware [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 750.709893] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73509686-fb13-410e-9ac5-5fca7e317ea5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.733173] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a88bafeb-e88a-47a5-804c-b80802ad5726 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.775944] env[62627]: DEBUG oslo_concurrency.lockutils [None req-74cb0fea-2229-4922-899f-dc4c69d8107b tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Lock "db8ddb7b-b119-415c-98d9-dc5b3d2868b7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.282s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 750.847812] env[62627]: DEBUG nova.scheduler.client.report [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 750.929232] env[62627]: DEBUG nova.network.neutron [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] [instance: 7691c2e2-4e43-4990-bf4d-2ef3298a4264] Successfully created port: abec21e1-204b-4ed0-801a-ab9f9bb8fcb9 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 750.940151] env[62627]: DEBUG nova.network.neutron [req-5e91c62b-c61b-43d4-95ff-7d45cb0113bf req-4da80b38-28d4-49f0-a535-14a51b6593af service nova] [instance: 77c60059-535a-47ca-a5b0-45cc1df93b45] Updated VIF entry in instance network info cache for port af7ffe26-3161-4ff9-abf9-eb8480443269. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 750.940151] env[62627]: DEBUG nova.network.neutron [req-5e91c62b-c61b-43d4-95ff-7d45cb0113bf req-4da80b38-28d4-49f0-a535-14a51b6593af service nova] [instance: 77c60059-535a-47ca-a5b0-45cc1df93b45] Updating instance_info_cache with network_info: [{"id": "af7ffe26-3161-4ff9-abf9-eb8480443269", "address": "fa:16:3e:15:cb:74", "network": {"id": "d75e7b13-f4ab-4626-9799-4970d691f831", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-781733128-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca351451d2344fc1872f6faf89412354", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c5f8c7b-74a0-43f7-87b4-3c38dd26ac3e", "external-id": "nsx-vlan-transportzone-988", "segmentation_id": 988, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf7ffe26-31", "ovs_interfaceid": "af7ffe26-3161-4ff9-abf9-eb8480443269", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 751.122063] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2193999, 'name': CreateVM_Task, 'duration_secs': 0.548821} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.122337] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e8c87637-4a30-4482-af21-f58ad8a218fc] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 751.123513] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 751.123513] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 751.124234] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 751.124234] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b5b04b4f-1905-4ab9-8d50-f0696124daf1 {{(pid=62627) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.130065] env[62627]: DEBUG oslo_vmware.api [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Waiting for the task: (returnval){ [ 751.130065] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52614241-a1bf-ebff-facc-310f99cc3254" [ 751.130065] env[62627]: _type = "Task" [ 751.130065] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.140226] env[62627]: DEBUG oslo_vmware.api [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52614241-a1bf-ebff-facc-310f99cc3254, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.201397] env[62627]: INFO nova.compute.manager [-] [instance: 1298559d-14fb-422c-8206-a35a747f0c06] Took 1.07 seconds to deallocate network for instance. [ 751.284121] env[62627]: DEBUG nova.compute.manager [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 76fb58dc-df20-464b-bda0-dcf09d69c718] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 751.336297] env[62627]: DEBUG oslo_vmware.rw_handles [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Completed reading data from the image iterator. {{(pid=62627) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 751.336699] env[62627]: DEBUG oslo_vmware.rw_handles [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7a8ba68d-6f9a-44d6-a0e4-19cde6375c34/d5497cf1-16e8-478a-a834-e9289b1751b7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62627) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 751.354140] env[62627]: DEBUG oslo_concurrency.lockutils [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.714s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 751.354828] env[62627]: DEBUG nova.compute.manager [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7] Start building networks asynchronously for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 751.358029] env[62627]: DEBUG oslo_concurrency.lockutils [None req-096fe3c8-8fc9-44cf-8029-4ac58f2116c8 tempest-DeleteServersAdminTestJSON-184387882 tempest-DeleteServersAdminTestJSON-184387882-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.388s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 751.358314] env[62627]: DEBUG nova.objects.instance [None req-096fe3c8-8fc9-44cf-8029-4ac58f2116c8 tempest-DeleteServersAdminTestJSON-184387882 tempest-DeleteServersAdminTestJSON-184387882-project-admin] Lazy-loading 'resources' on Instance uuid 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 751.415445] env[62627]: DEBUG nova.virt.vmwareapi.images [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 33bf3932-3746-445b-b0fe-611466cdd9b1] Downloaded image file data d5497cf1-16e8-478a-a834-e9289b1751b7 to vmware_temp/7a8ba68d-6f9a-44d6-a0e4-19cde6375c34/d5497cf1-16e8-478a-a834-e9289b1751b7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62627) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 751.416549] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 33bf3932-3746-445b-b0fe-611466cdd9b1] Caching image {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 751.417559] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Copying Virtual Disk [datastore1] vmware_temp/7a8ba68d-6f9a-44d6-a0e4-19cde6375c34/d5497cf1-16e8-478a-a834-e9289b1751b7/tmp-sparse.vmdk to [datastore1] vmware_temp/7a8ba68d-6f9a-44d6-a0e4-19cde6375c34/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 751.417668] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e0d69a36-5b11-4ee9-985f-f8ff2f291b81 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.427444] env[62627]: DEBUG oslo_vmware.api [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Waiting for the task: (returnval){ [ 751.427444] env[62627]: value = "task-2194000" [ 751.427444] env[62627]: _type = "Task" [ 751.427444] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.442917] env[62627]: DEBUG oslo_vmware.api [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194000, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.447748] env[62627]: DEBUG oslo_concurrency.lockutils [req-5e91c62b-c61b-43d4-95ff-7d45cb0113bf req-4da80b38-28d4-49f0-a535-14a51b6593af service nova] Releasing lock "refresh_cache-77c60059-535a-47ca-a5b0-45cc1df93b45" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 751.645726] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 751.645997] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: e8c87637-4a30-4482-af21-f58ad8a218fc] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 751.646241] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 751.654839] env[62627]: DEBUG nova.compute.manager [req-bf184173-a618-4ffa-ba5a-511816c75e8b req-752d3179-e410-466f-823a-376897e7eea7 service nova] [instance: e8c87637-4a30-4482-af21-f58ad8a218fc] Received event network-changed-a68f6bb7-6d15-4329-aedf-cc72bcc3e57a {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 751.655101] env[62627]: DEBUG nova.compute.manager [req-bf184173-a618-4ffa-ba5a-511816c75e8b req-752d3179-e410-466f-823a-376897e7eea7 service nova] [instance: e8c87637-4a30-4482-af21-f58ad8a218fc] Refreshing instance network info cache due to event network-changed-a68f6bb7-6d15-4329-aedf-cc72bcc3e57a. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 751.659553] env[62627]: DEBUG oslo_concurrency.lockutils [req-bf184173-a618-4ffa-ba5a-511816c75e8b req-752d3179-e410-466f-823a-376897e7eea7 service nova] Acquiring lock "refresh_cache-e8c87637-4a30-4482-af21-f58ad8a218fc" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 751.659553] env[62627]: DEBUG oslo_concurrency.lockutils [req-bf184173-a618-4ffa-ba5a-511816c75e8b req-752d3179-e410-466f-823a-376897e7eea7 service nova] Acquired lock "refresh_cache-e8c87637-4a30-4482-af21-f58ad8a218fc" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 751.659553] env[62627]: DEBUG nova.network.neutron [req-bf184173-a618-4ffa-ba5a-511816c75e8b req-752d3179-e410-466f-823a-376897e7eea7 service nova] [instance: e8c87637-4a30-4482-af21-f58ad8a218fc] Refreshing network info cache for port a68f6bb7-6d15-4329-aedf-cc72bcc3e57a {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 751.715856] env[62627]: DEBUG oslo_concurrency.lockutils [None req-10862f9b-0f1c-4b93-8fab-d832620bc520 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 751.818816] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 751.861673] env[62627]: DEBUG nova.compute.utils [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 751.872208] env[62627]: DEBUG nova.compute.manager [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7] Allocating IP information in the background. {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 751.873749] env[62627]: DEBUG nova.network.neutron [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 751.945518] env[62627]: DEBUG oslo_vmware.api [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194000, 'name': CopyVirtualDisk_Task} progress is 70%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.016495] env[62627]: DEBUG nova.policy [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f3c4f7676644413395ef9f0026d96f67', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '559dec0891b747438b15609a2f41ac0f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 752.313218] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Acquiring lock "8af5ca34-f9c4-4b68-b0f7-7163f388b778" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 752.313218] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Lock "8af5ca34-f9c4-4b68-b0f7-7163f388b778" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 752.377236] env[62627]: DEBUG nova.compute.manager [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 752.422382] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ef57d29-757d-4374-bbec-b820d008bf4e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.428994] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf922530-f962-4b48-9315-433c94c5cab5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.493329] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b439f7b8-cdd0-4e58-acb4-3695ebb71947 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.497068] env[62627]: DEBUG oslo_vmware.api [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194000, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.891084} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.497204] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Copied Virtual Disk [datastore1] vmware_temp/7a8ba68d-6f9a-44d6-a0e4-19cde6375c34/d5497cf1-16e8-478a-a834-e9289b1751b7/tmp-sparse.vmdk to [datastore1] vmware_temp/7a8ba68d-6f9a-44d6-a0e4-19cde6375c34/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 752.497651] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Deleting the datastore file [datastore1] vmware_temp/7a8ba68d-6f9a-44d6-a0e4-19cde6375c34/d5497cf1-16e8-478a-a834-e9289b1751b7/tmp-sparse.vmdk {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 752.498372] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e413026a-cc00-42fc-baf9-77dcedfd3f72 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.505295] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-955bb8ac-f4f2-4cbf-88d9-b936ea868053 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.513780] env[62627]: DEBUG oslo_vmware.api [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Waiting for the task: (returnval){ [ 752.513780] env[62627]: value = "task-2194001" [ 752.513780] env[62627]: _type = "Task" [ 752.513780] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.537487] env[62627]: DEBUG nova.compute.provider_tree [None req-096fe3c8-8fc9-44cf-8029-4ac58f2116c8 tempest-DeleteServersAdminTestJSON-184387882 tempest-DeleteServersAdminTestJSON-184387882-project-admin] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 752.547114] env[62627]: DEBUG oslo_vmware.api [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194001, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.028475} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.547114] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 752.547114] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Moving file from [datastore1] vmware_temp/7a8ba68d-6f9a-44d6-a0e4-19cde6375c34/d5497cf1-16e8-478a-a834-e9289b1751b7 to [datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7. {{(pid=62627) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 752.547114] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-d299bb80-f8b5-49a8-aaf0-7607ea54238b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.556282] env[62627]: DEBUG oslo_vmware.api [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Waiting for the task: (returnval){ [ 752.556282] env[62627]: value = "task-2194002" [ 752.556282] env[62627]: _type = "Task" [ 752.556282] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.571318] env[62627]: DEBUG oslo_vmware.api [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194002, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.044088] env[62627]: DEBUG nova.scheduler.client.report [None req-096fe3c8-8fc9-44cf-8029-4ac58f2116c8 tempest-DeleteServersAdminTestJSON-184387882 tempest-DeleteServersAdminTestJSON-184387882-project-admin] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 753.068022] env[62627]: DEBUG oslo_vmware.api [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194002, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.039537} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.068280] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] File moved {{(pid=62627) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 753.068469] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 33bf3932-3746-445b-b0fe-611466cdd9b1] Cleaning up location [datastore1] vmware_temp/7a8ba68d-6f9a-44d6-a0e4-19cde6375c34 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 753.068624] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Deleting the datastore file [datastore1] vmware_temp/7a8ba68d-6f9a-44d6-a0e4-19cde6375c34 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 753.068879] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-edb81e88-ee6e-4735-a8e7-f3a5de4d86b8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.077549] env[62627]: DEBUG oslo_vmware.api [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Waiting for the task: (returnval){ [ 753.077549] env[62627]: value = "task-2194003" [ 753.077549] env[62627]: _type = "Task" [ 753.077549] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.092963] env[62627]: DEBUG oslo_vmware.api [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194003, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.124086] env[62627]: DEBUG nova.network.neutron [req-bf184173-a618-4ffa-ba5a-511816c75e8b req-752d3179-e410-466f-823a-376897e7eea7 service nova] [instance: e8c87637-4a30-4482-af21-f58ad8a218fc] Updated VIF entry in instance network info cache for port a68f6bb7-6d15-4329-aedf-cc72bcc3e57a. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 753.125758] env[62627]: DEBUG nova.network.neutron [req-bf184173-a618-4ffa-ba5a-511816c75e8b req-752d3179-e410-466f-823a-376897e7eea7 service nova] [instance: e8c87637-4a30-4482-af21-f58ad8a218fc] Updating instance_info_cache with network_info: [{"id": "a68f6bb7-6d15-4329-aedf-cc72bcc3e57a", "address": "fa:16:3e:d9:a4:9c", "network": {"id": "529dc9a6-8843-4139-a315-ff9ae56f4d81", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1514488580-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8fe72f9f87144786adbb917cf0da1b19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1eed7865-f9d8-463e-843f-3b0b3a962a2c", "external-id": "nsx-vlan-transportzone-852", "segmentation_id": 852, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa68f6bb7-6d", "ovs_interfaceid": "a68f6bb7-6d15-4329-aedf-cc72bcc3e57a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 753.250079] env[62627]: DEBUG oslo_concurrency.lockutils [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquiring lock "ea289de9-9b2d-4756-a9a5-0e3dfe8db772" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 753.250484] env[62627]: DEBUG oslo_concurrency.lockutils [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "ea289de9-9b2d-4756-a9a5-0e3dfe8db772" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 753.370935] env[62627]: DEBUG nova.compute.manager [req-cf4303bd-a52f-4393-a5c4-ae27f71d7950 req-50242e2a-0055-4cba-9893-19e0698a5815 service nova] [instance: 77c60059-535a-47ca-a5b0-45cc1df93b45] Received event network-changed-af7ffe26-3161-4ff9-abf9-eb8480443269 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 753.371203] env[62627]: DEBUG nova.compute.manager [req-cf4303bd-a52f-4393-a5c4-ae27f71d7950 req-50242e2a-0055-4cba-9893-19e0698a5815 service nova] [instance: 77c60059-535a-47ca-a5b0-45cc1df93b45] Refreshing instance network info cache due to event network-changed-af7ffe26-3161-4ff9-abf9-eb8480443269. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 753.371359] env[62627]: DEBUG oslo_concurrency.lockutils [req-cf4303bd-a52f-4393-a5c4-ae27f71d7950 req-50242e2a-0055-4cba-9893-19e0698a5815 service nova] Acquiring lock "refresh_cache-77c60059-535a-47ca-a5b0-45cc1df93b45" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 753.371522] env[62627]: DEBUG oslo_concurrency.lockutils [req-cf4303bd-a52f-4393-a5c4-ae27f71d7950 req-50242e2a-0055-4cba-9893-19e0698a5815 service nova] Acquired lock "refresh_cache-77c60059-535a-47ca-a5b0-45cc1df93b45" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 753.371739] env[62627]: DEBUG nova.network.neutron [req-cf4303bd-a52f-4393-a5c4-ae27f71d7950 req-50242e2a-0055-4cba-9893-19e0698a5815 service nova] [instance: 77c60059-535a-47ca-a5b0-45cc1df93b45] Refreshing network info cache for port af7ffe26-3161-4ff9-abf9-eb8480443269 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 753.387942] env[62627]: DEBUG nova.compute.manager [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7] Start spawning the instance on the hypervisor. {{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 753.411701] env[62627]: DEBUG nova.network.neutron [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7] Successfully created port: 6de87f0c-6ba0-47bb-8c42-aa150e7855fe {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 753.417413] env[62627]: DEBUG nova.virt.hardware [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 753.417413] env[62627]: DEBUG nova.virt.hardware [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 753.417556] env[62627]: DEBUG nova.virt.hardware [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Image limits 0:0:0 {{(pid=62627) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 753.417750] env[62627]: DEBUG nova.virt.hardware [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 753.417932] env[62627]: DEBUG nova.virt.hardware [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 753.418126] env[62627]: DEBUG nova.virt.hardware [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 753.418393] env[62627]: DEBUG nova.virt.hardware [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 753.418558] env[62627]: DEBUG nova.virt.hardware [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 753.418756] env[62627]: DEBUG nova.virt.hardware [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 753.419017] env[62627]: DEBUG nova.virt.hardware [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 753.419327] env[62627]: DEBUG nova.virt.hardware [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 753.420584] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b55b846-7f4f-44cb-8ee8-afbfca77e8d8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.432031] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e289c72-5ecd-45d0-b3f8-72f86617f58e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.556394] env[62627]: DEBUG oslo_concurrency.lockutils [None req-096fe3c8-8fc9-44cf-8029-4ac58f2116c8 
tempest-DeleteServersAdminTestJSON-184387882 tempest-DeleteServersAdminTestJSON-184387882-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.196s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 753.563757] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.705s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 753.564590] env[62627]: INFO nova.compute.claims [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 753.593357] env[62627]: DEBUG oslo_vmware.api [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194003, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.085922} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.593357] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 753.593498] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e12b3650-fb97-40ed-9af9-6b6481f29e06 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.600448] env[62627]: INFO nova.scheduler.client.report [None req-096fe3c8-8fc9-44cf-8029-4ac58f2116c8 tempest-DeleteServersAdminTestJSON-184387882 tempest-DeleteServersAdminTestJSON-184387882-project-admin] Deleted allocations for instance 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0 [ 753.606111] env[62627]: DEBUG oslo_vmware.api [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Waiting for the task: (returnval){ [ 753.606111] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5294341f-3c37-a0f1-fe7c-c4962bf8547d" [ 753.606111] env[62627]: _type = "Task" [ 753.606111] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.618530] env[62627]: DEBUG oslo_vmware.api [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5294341f-3c37-a0f1-fe7c-c4962bf8547d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.622590] env[62627]: DEBUG nova.network.neutron [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] [instance: 7691c2e2-4e43-4990-bf4d-2ef3298a4264] Successfully updated port: abec21e1-204b-4ed0-801a-ab9f9bb8fcb9 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 753.628096] env[62627]: DEBUG oslo_concurrency.lockutils [req-bf184173-a618-4ffa-ba5a-511816c75e8b req-752d3179-e410-466f-823a-376897e7eea7 service nova] Releasing lock "refresh_cache-e8c87637-4a30-4482-af21-f58ad8a218fc" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 754.116630] env[62627]: DEBUG oslo_concurrency.lockutils [None req-096fe3c8-8fc9-44cf-8029-4ac58f2116c8 tempest-DeleteServersAdminTestJSON-184387882 tempest-DeleteServersAdminTestJSON-184387882-project-admin] Lock "5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.315s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 754.123487] env[62627]: DEBUG oslo_vmware.api [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5294341f-3c37-a0f1-fe7c-c4962bf8547d, 'name': SearchDatastore_Task, 'duration_secs': 0.010182} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.126019] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 754.126019] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore1] 33bf3932-3746-445b-b0fe-611466cdd9b1/33bf3932-3746-445b-b0fe-611466cdd9b1.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 754.126019] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 754.126019] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 754.126637] env[62627]: DEBUG oslo_vmware.service [-] 
Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d3d3d2d4-a013-41d1-ac7d-679c656b2291 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.129604] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-49c08651-3d4c-4f89-9dc0-0f321e7d5a8a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.133547] env[62627]: DEBUG oslo_concurrency.lockutils [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Acquiring lock "refresh_cache-7691c2e2-4e43-4990-bf4d-2ef3298a4264" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 754.133734] env[62627]: DEBUG oslo_concurrency.lockutils [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Acquired lock "refresh_cache-7691c2e2-4e43-4990-bf4d-2ef3298a4264" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 754.133986] env[62627]: DEBUG nova.network.neutron [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] [instance: 7691c2e2-4e43-4990-bf4d-2ef3298a4264] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 754.144778] env[62627]: DEBUG oslo_vmware.api [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Waiting for the task: (returnval){ [ 754.144778] env[62627]: value = "task-2194004" [ 754.144778] env[62627]: _type = "Task" [ 754.144778] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.157037] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 754.157276] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 754.159295] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-55afce14-84ef-41d8-988d-6879968b227b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.169059] env[62627]: DEBUG oslo_vmware.api [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194004, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.172713] env[62627]: DEBUG oslo_vmware.api [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Waiting for the task: (returnval){ [ 754.172713] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]526e4276-48c7-5d4f-5b61-3a2d197ef993" [ 754.172713] env[62627]: _type = "Task" [ 754.172713] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.182782] env[62627]: DEBUG oslo_vmware.api [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]526e4276-48c7-5d4f-5b61-3a2d197ef993, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.266393] env[62627]: DEBUG nova.compute.manager [req-77b83800-cb6f-4bed-ae0b-6dc72ea17942 req-d9627f24-db52-4486-91b4-8d710fad2b26 service nova] [instance: 7691c2e2-4e43-4990-bf4d-2ef3298a4264] Received event network-vif-plugged-abec21e1-204b-4ed0-801a-ab9f9bb8fcb9 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 754.266597] env[62627]: DEBUG oslo_concurrency.lockutils [req-77b83800-cb6f-4bed-ae0b-6dc72ea17942 req-d9627f24-db52-4486-91b4-8d710fad2b26 service nova] Acquiring lock "7691c2e2-4e43-4990-bf4d-2ef3298a4264-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 754.266791] env[62627]: DEBUG oslo_concurrency.lockutils [req-77b83800-cb6f-4bed-ae0b-6dc72ea17942 req-d9627f24-db52-4486-91b4-8d710fad2b26 service nova] Lock "7691c2e2-4e43-4990-bf4d-2ef3298a4264-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 754.267278] env[62627]: DEBUG oslo_concurrency.lockutils [req-77b83800-cb6f-4bed-ae0b-6dc72ea17942 req-d9627f24-db52-4486-91b4-8d710fad2b26 service nova] Lock "7691c2e2-4e43-4990-bf4d-2ef3298a4264-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 754.267632] env[62627]: DEBUG nova.compute.manager [req-77b83800-cb6f-4bed-ae0b-6dc72ea17942 req-d9627f24-db52-4486-91b4-8d710fad2b26 service nova] [instance: 7691c2e2-4e43-4990-bf4d-2ef3298a4264] No waiting events found dispatching network-vif-plugged-abec21e1-204b-4ed0-801a-ab9f9bb8fcb9 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 754.267689] env[62627]: WARNING nova.compute.manager [req-77b83800-cb6f-4bed-ae0b-6dc72ea17942 req-d9627f24-db52-4486-91b4-8d710fad2b26 service nova] [instance: 7691c2e2-4e43-4990-bf4d-2ef3298a4264] Received unexpected event network-vif-plugged-abec21e1-204b-4ed0-801a-ab9f9bb8fcb9 for instance with vm_state building and task_state spawning. 
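The repeated "Waiting for the task: (returnval){ ... } to complete" and "progress is N%" entries above come from a poll-until-complete loop around each vCenter task. Below is a minimal, self-contained sketch of that pattern under stated assumptions: TaskInfo, poll_fn, and wait_for_task_like are illustrative stand-ins written for this sketch, not the actual oslo.vmware API or its signatures.

    # Illustrative only: poll a task until it reports success or error,
    # echoing progress the way the _poll_task debug lines above do.
    import time
    from dataclasses import dataclass
    from typing import Callable

    @dataclass
    class TaskInfo:
        state: str            # e.g. "running", "success", "error"
        progress: int         # 0-100, as in "progress is 70%"
        error: str | None = None

    def wait_for_task_like(poll_fn: Callable[[], TaskInfo],
                           interval: float = 0.5,
                           timeout: float = 300.0) -> TaskInfo:
        """Call poll_fn until the task succeeds, fails, or times out."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = poll_fn()
            if info.state == "success":
                return info
            if info.state == "error":
                raise RuntimeError(f"task failed: {info.error}")
            print(f"progress is {info.progress}%")  # log-style progress report
            time.sleep(interval)
        raise TimeoutError("task did not complete in time")

    # Example: a fake poller that finishes after three polls.
    _calls = iter([TaskInfo("running", 0), TaskInfo("running", 70),
                   TaskInfo("success", 100)])
    wait_for_task_like(lambda: next(_calls), interval=0.01)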
[ 754.267835] env[62627]: DEBUG nova.compute.manager [req-77b83800-cb6f-4bed-ae0b-6dc72ea17942 req-d9627f24-db52-4486-91b4-8d710fad2b26 service nova] [instance: 7691c2e2-4e43-4990-bf4d-2ef3298a4264] Received event network-changed-abec21e1-204b-4ed0-801a-ab9f9bb8fcb9 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 754.267981] env[62627]: DEBUG nova.compute.manager [req-77b83800-cb6f-4bed-ae0b-6dc72ea17942 req-d9627f24-db52-4486-91b4-8d710fad2b26 service nova] [instance: 7691c2e2-4e43-4990-bf4d-2ef3298a4264] Refreshing instance network info cache due to event network-changed-abec21e1-204b-4ed0-801a-ab9f9bb8fcb9. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 754.268271] env[62627]: DEBUG oslo_concurrency.lockutils [req-77b83800-cb6f-4bed-ae0b-6dc72ea17942 req-d9627f24-db52-4486-91b4-8d710fad2b26 service nova] Acquiring lock "refresh_cache-7691c2e2-4e43-4990-bf4d-2ef3298a4264" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 754.346646] env[62627]: DEBUG nova.network.neutron [req-cf4303bd-a52f-4393-a5c4-ae27f71d7950 req-50242e2a-0055-4cba-9893-19e0698a5815 service nova] [instance: 77c60059-535a-47ca-a5b0-45cc1df93b45] Updated VIF entry in instance network info cache for port af7ffe26-3161-4ff9-abf9-eb8480443269. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 754.348944] env[62627]: DEBUG nova.network.neutron [req-cf4303bd-a52f-4393-a5c4-ae27f71d7950 req-50242e2a-0055-4cba-9893-19e0698a5815 service nova] [instance: 77c60059-535a-47ca-a5b0-45cc1df93b45] Updating instance_info_cache with network_info: [{"id": "af7ffe26-3161-4ff9-abf9-eb8480443269", "address": "fa:16:3e:15:cb:74", "network": {"id": "d75e7b13-f4ab-4626-9799-4970d691f831", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-781733128-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca351451d2344fc1872f6faf89412354", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c5f8c7b-74a0-43f7-87b4-3c38dd26ac3e", "external-id": "nsx-vlan-transportzone-988", "segmentation_id": 988, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf7ffe26-31", "ovs_interfaceid": "af7ffe26-3161-4ff9-abf9-eb8480443269", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 754.661084] env[62627]: DEBUG oslo_vmware.api [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194004, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.507316} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.661442] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore1] 33bf3932-3746-445b-b0fe-611466cdd9b1/33bf3932-3746-445b-b0fe-611466cdd9b1.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 754.661856] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 33bf3932-3746-445b-b0fe-611466cdd9b1] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 754.662139] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-96770cca-f938-493b-8c5b-fe13500007f1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.675553] env[62627]: DEBUG oslo_vmware.api [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Waiting for the task: (returnval){ [ 754.675553] env[62627]: value = "task-2194005" [ 754.675553] env[62627]: _type = "Task" [ 754.675553] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.686618] env[62627]: DEBUG oslo_vmware.api [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]526e4276-48c7-5d4f-5b61-3a2d197ef993, 'name': SearchDatastore_Task, 'duration_secs': 0.019908} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.693764] env[62627]: DEBUG nova.network.neutron [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] [instance: 7691c2e2-4e43-4990-bf4d-2ef3298a4264] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 754.696234] env[62627]: DEBUG oslo_vmware.api [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194005, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.699018] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5d3a270-11f5-473e-941f-c3c9ca3584f1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.706820] env[62627]: DEBUG oslo_vmware.api [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Waiting for the task: (returnval){ [ 754.706820] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52cbf46f-29d1-3693-e701-12d4e5aa690b" [ 754.706820] env[62627]: _type = "Task" [ 754.706820] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.720079] env[62627]: DEBUG oslo_vmware.api [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52cbf46f-29d1-3693-e701-12d4e5aa690b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.857662] env[62627]: DEBUG oslo_concurrency.lockutils [req-cf4303bd-a52f-4393-a5c4-ae27f71d7950 req-50242e2a-0055-4cba-9893-19e0698a5815 service nova] Releasing lock "refresh_cache-77c60059-535a-47ca-a5b0-45cc1df93b45" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 754.998572] env[62627]: DEBUG nova.network.neutron [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] [instance: 7691c2e2-4e43-4990-bf4d-2ef3298a4264] Updating instance_info_cache with network_info: [{"id": "abec21e1-204b-4ed0-801a-ab9f9bb8fcb9", "address": "fa:16:3e:88:23:61", "network": {"id": "227e798c-693c-4b8a-ad80-f419e26c7366", "bridge": "br-int", "label": "tempest-ServersTestJSON-1035789165-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9bd77b83b3af479d9713f405098112ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1cbd5e0e-9116-46f1-9748-13a73d2d7e75", "external-id": "nsx-vlan-transportzone-690", "segmentation_id": 690, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapabec21e1-20", "ovs_interfaceid": "abec21e1-204b-4ed0-801a-ab9f9bb8fcb9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 755.105072] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a0e836f-8243-45ac-b0b7-f2b3ab7c7cec {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.113360] env[62627]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6a19062-891f-4af5-a6de-8b9223626eb2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.149483] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb5a380d-a5d6-4a38-9e10-fd4e572b8fd3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.158536] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0b5d08b-baf6-4083-a3ed-6df121e11fb2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.175878] env[62627]: DEBUG nova.compute.provider_tree [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 755.186754] env[62627]: DEBUG oslo_vmware.api [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194005, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071204} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.187650] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 33bf3932-3746-445b-b0fe-611466cdd9b1] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 755.188564] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a552d1a2-4c39-49ab-af4b-57053ac12415 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.213906] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 33bf3932-3746-445b-b0fe-611466cdd9b1] Reconfiguring VM instance instance-0000000b to attach disk [datastore1] 33bf3932-3746-445b-b0fe-611466cdd9b1/33bf3932-3746-445b-b0fe-611466cdd9b1.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 755.216028] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-68a0ba44-42f4-437d-be29-5ca2e15689ed {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.244228] env[62627]: DEBUG oslo_vmware.api [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52cbf46f-29d1-3693-e701-12d4e5aa690b, 'name': SearchDatastore_Task, 'duration_secs': 0.011132} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.245740] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 755.246276] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore1] 63e884c4-2cc1-431d-8a7f-1c4259b29e1a/63e884c4-2cc1-431d-8a7f-1c4259b29e1a.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 755.246509] env[62627]: DEBUG oslo_vmware.api [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Waiting for the task: (returnval){ [ 755.246509] env[62627]: value = "task-2194006" [ 755.246509] env[62627]: _type = "Task" [ 755.246509] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.247376] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 755.247376] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 755.247376] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e7311eab-d921-49c7-9d92-fad2ee3c0830 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.249515] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3906b573-c2a0-4d1c-a65d-99b174fcb4b9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.262715] env[62627]: DEBUG oslo_vmware.api [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194006, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.265887] env[62627]: DEBUG oslo_vmware.api [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Waiting for the task: (returnval){ [ 755.265887] env[62627]: value = "task-2194007" [ 755.265887] env[62627]: _type = "Task" [ 755.265887] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.267034] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 755.267034] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 755.269671] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ac6efc4a-299e-416f-961a-9cfdd6a63b39 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.283976] env[62627]: DEBUG oslo_vmware.api [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Waiting for the task: (returnval){ [ 755.283976] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]523439d4-1932-edbe-0502-228cb75f0268" [ 755.283976] env[62627]: _type = "Task" [ 755.283976] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.284125] env[62627]: DEBUG oslo_vmware.api [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Task: {'id': task-2194007, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.295287] env[62627]: DEBUG oslo_vmware.api [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]523439d4-1932-edbe-0502-228cb75f0268, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.357697] env[62627]: DEBUG oslo_concurrency.lockutils [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Acquiring lock "95ac4fbf-071a-403e-8cae-88d9899757ba" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 755.357697] env[62627]: DEBUG oslo_concurrency.lockutils [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Lock "95ac4fbf-071a-403e-8cae-88d9899757ba" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 755.507227] env[62627]: DEBUG oslo_concurrency.lockutils [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Releasing lock "refresh_cache-7691c2e2-4e43-4990-bf4d-2ef3298a4264" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 755.508039] env[62627]: DEBUG nova.compute.manager [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] [instance: 7691c2e2-4e43-4990-bf4d-2ef3298a4264] Instance network_info: |[{"id": "abec21e1-204b-4ed0-801a-ab9f9bb8fcb9", "address": "fa:16:3e:88:23:61", "network": {"id": "227e798c-693c-4b8a-ad80-f419e26c7366", "bridge": "br-int", "label": "tempest-ServersTestJSON-1035789165-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9bd77b83b3af479d9713f405098112ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1cbd5e0e-9116-46f1-9748-13a73d2d7e75", "external-id": "nsx-vlan-transportzone-690", "segmentation_id": 690, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapabec21e1-20", "ovs_interfaceid": "abec21e1-204b-4ed0-801a-ab9f9bb8fcb9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 755.508039] env[62627]: DEBUG oslo_concurrency.lockutils [req-77b83800-cb6f-4bed-ae0b-6dc72ea17942 req-d9627f24-db52-4486-91b4-8d710fad2b26 service nova] Acquired lock "refresh_cache-7691c2e2-4e43-4990-bf4d-2ef3298a4264" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 755.508213] env[62627]: DEBUG nova.network.neutron [req-77b83800-cb6f-4bed-ae0b-6dc72ea17942 req-d9627f24-db52-4486-91b4-8d710fad2b26 service nova] [instance: 7691c2e2-4e43-4990-bf4d-2ef3298a4264] Refreshing network info cache for port abec21e1-204b-4ed0-801a-ab9f9bb8fcb9 {{(pid=62627) 
_get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 755.509320] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] [instance: 7691c2e2-4e43-4990-bf4d-2ef3298a4264] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:88:23:61', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1cbd5e0e-9116-46f1-9748-13a73d2d7e75', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'abec21e1-204b-4ed0-801a-ab9f9bb8fcb9', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 755.518986] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Creating folder: Project (9bd77b83b3af479d9713f405098112ac). Parent ref: group-v447541. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 755.519623] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-39e6ee8b-67cd-41f8-9b69-af28ac3583b9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.531314] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Created folder: Project (9bd77b83b3af479d9713f405098112ac) in parent group-v447541. [ 755.531772] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Creating folder: Instances. Parent ref: group-v447581. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 755.531772] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-75b6041c-1596-4221-9649-51fd58f67b1f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.547597] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Created folder: Instances in parent group-v447581. [ 755.547897] env[62627]: DEBUG oslo.service.loopingcall [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 755.548058] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7691c2e2-4e43-4990-bf4d-2ef3298a4264] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 755.548277] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4bc8bbc6-3cdf-47b5-aacc-4661eadc14ea {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.577040] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 755.577040] env[62627]: value = "task-2194010" [ 755.577040] env[62627]: _type = "Task" [ 755.577040] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.579561] env[62627]: DEBUG nova.network.neutron [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7] Successfully updated port: 6de87f0c-6ba0-47bb-8c42-aa150e7855fe {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 755.587322] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194010, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.682018] env[62627]: DEBUG nova.scheduler.client.report [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 755.773603] env[62627]: DEBUG oslo_vmware.api [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194006, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.785211] env[62627]: DEBUG oslo_vmware.api [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Task: {'id': task-2194007, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.798996] env[62627]: DEBUG oslo_vmware.api [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]523439d4-1932-edbe-0502-228cb75f0268, 'name': SearchDatastore_Task, 'duration_secs': 0.047553} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.800743] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7f5b8b3b-895a-423c-ae1a-ad6372c6c53d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.813079] env[62627]: DEBUG oslo_vmware.api [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Waiting for the task: (returnval){ [ 755.813079] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52ea7ba2-6f43-8f36-75fb-b47a1b00a5ef" [ 755.813079] env[62627]: _type = "Task" [ 755.813079] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.833121] env[62627]: DEBUG oslo_vmware.api [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52ea7ba2-6f43-8f36-75fb-b47a1b00a5ef, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.883971] env[62627]: DEBUG oslo_concurrency.lockutils [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Acquiring lock "cbd30c37-f6c3-4229-8b95-855cab64924c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 755.883971] env[62627]: DEBUG oslo_concurrency.lockutils [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Lock "cbd30c37-f6c3-4229-8b95-855cab64924c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 756.084751] env[62627]: DEBUG oslo_concurrency.lockutils [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Acquiring lock "refresh_cache-f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 756.084909] env[62627]: DEBUG oslo_concurrency.lockutils [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Acquired lock "refresh_cache-f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 756.085068] env[62627]: DEBUG nova.network.neutron [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 756.090873] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194010, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.189568] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.630s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 756.189890] env[62627]: DEBUG nova.compute.manager [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 756.200415] env[62627]: DEBUG oslo_concurrency.lockutils [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 15.261s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 756.267524] env[62627]: DEBUG oslo_vmware.api [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194006, 'name': ReconfigVM_Task, 'duration_secs': 0.784113} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.269077] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 33bf3932-3746-445b-b0fe-611466cdd9b1] Reconfigured VM instance instance-0000000b to attach disk [datastore1] 33bf3932-3746-445b-b0fe-611466cdd9b1/33bf3932-3746-445b-b0fe-611466cdd9b1.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 756.276553] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4fa17c42-2c7a-4e33-a657-0cee4cf41b82 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.279050] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Acquiring lock "241311b0-ba81-41d1-a9b6-cd40d04dbe3c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 756.279273] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Lock "241311b0-ba81-41d1-a9b6-cd40d04dbe3c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 756.291238] env[62627]: DEBUG oslo_vmware.api [None 
req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Task: {'id': task-2194007, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.704186} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.292874] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore1] 63e884c4-2cc1-431d-8a7f-1c4259b29e1a/63e884c4-2cc1-431d-8a7f-1c4259b29e1a.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 756.293303] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 756.293751] env[62627]: DEBUG oslo_vmware.api [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Waiting for the task: (returnval){ [ 756.293751] env[62627]: value = "task-2194011" [ 756.293751] env[62627]: _type = "Task" [ 756.293751] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.294068] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-70ed01e5-d7de-4ca4-8a4e-456db7aa5b76 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.306027] env[62627]: DEBUG oslo_vmware.api [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194011, 'name': Rename_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.307465] env[62627]: DEBUG oslo_vmware.api [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Waiting for the task: (returnval){ [ 756.307465] env[62627]: value = "task-2194012" [ 756.307465] env[62627]: _type = "Task" [ 756.307465] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.313726] env[62627]: DEBUG nova.compute.manager [req-f9243084-7992-4295-a442-e6781426d947 req-e5fca34e-ce17-4ebd-ac3c-81ab98ec9af0 service nova] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Received event network-changed-32dda461-7395-4104-9323-4a39535875de {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 756.313726] env[62627]: DEBUG nova.compute.manager [req-f9243084-7992-4295-a442-e6781426d947 req-e5fca34e-ce17-4ebd-ac3c-81ab98ec9af0 service nova] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Refreshing instance network info cache due to event network-changed-32dda461-7395-4104-9323-4a39535875de. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 756.313726] env[62627]: DEBUG oslo_concurrency.lockutils [req-f9243084-7992-4295-a442-e6781426d947 req-e5fca34e-ce17-4ebd-ac3c-81ab98ec9af0 service nova] Acquiring lock "refresh_cache-db8ddb7b-b119-415c-98d9-dc5b3d2868b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 756.313726] env[62627]: DEBUG oslo_concurrency.lockutils [req-f9243084-7992-4295-a442-e6781426d947 req-e5fca34e-ce17-4ebd-ac3c-81ab98ec9af0 service nova] Acquired lock "refresh_cache-db8ddb7b-b119-415c-98d9-dc5b3d2868b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 756.313726] env[62627]: DEBUG nova.network.neutron [req-f9243084-7992-4295-a442-e6781426d947 req-e5fca34e-ce17-4ebd-ac3c-81ab98ec9af0 service nova] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Refreshing network info cache for port 32dda461-7395-4104-9323-4a39535875de {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 756.327187] env[62627]: DEBUG oslo_vmware.api [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Task: {'id': task-2194012, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.339061] env[62627]: DEBUG oslo_vmware.api [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52ea7ba2-6f43-8f36-75fb-b47a1b00a5ef, 'name': SearchDatastore_Task, 'duration_secs': 0.06064} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.339061] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 756.339061] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore1] e8c87637-4a30-4482-af21-f58ad8a218fc/e8c87637-4a30-4482-af21-f58ad8a218fc.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 756.339061] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1ee44207-c0ea-48ab-8bb5-069c8424cd10 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.346593] env[62627]: DEBUG oslo_vmware.api [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Waiting for the task: (returnval){ [ 756.346593] env[62627]: value = "task-2194013" [ 756.346593] env[62627]: _type = "Task" [ 756.346593] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.359167] env[62627]: DEBUG oslo_vmware.api [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194013, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.588904] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194010, 'name': CreateVM_Task, 'duration_secs': 0.747599} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.589481] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7691c2e2-4e43-4990-bf4d-2ef3298a4264] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 756.590612] env[62627]: DEBUG oslo_concurrency.lockutils [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 756.590768] env[62627]: DEBUG oslo_concurrency.lockutils [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 756.591118] env[62627]: DEBUG oslo_concurrency.lockutils [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 756.591365] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-289f0450-af67-47e4-b9d2-3df1423baa52 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.605162] env[62627]: DEBUG oslo_vmware.api [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Waiting for the task: (returnval){ [ 756.605162] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d2d5e8-1211-95d3-b399-afa3b0da8d6b" [ 756.605162] env[62627]: _type = "Task" [ 756.605162] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.621355] env[62627]: DEBUG oslo_vmware.api [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d2d5e8-1211-95d3-b399-afa3b0da8d6b, 'name': SearchDatastore_Task, 'duration_secs': 0.012054} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.621662] env[62627]: DEBUG oslo_concurrency.lockutils [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 756.621891] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] [instance: 7691c2e2-4e43-4990-bf4d-2ef3298a4264] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 756.622151] env[62627]: DEBUG oslo_concurrency.lockutils [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 756.622322] env[62627]: DEBUG oslo_concurrency.lockutils [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 756.622504] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 756.622770] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a4641096-1458-4251-8926-e831f47beb97 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.633272] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 756.633479] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 756.634341] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-57ff2725-0b97-4284-9223-fe5e5efdfa3f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.641809] env[62627]: DEBUG oslo_vmware.api [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Waiting for the task: (returnval){ [ 756.641809] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52ff5f3e-0709-360b-bf98-3dba5ea18bf0" [ 756.641809] env[62627]: _type = "Task" [ 756.641809] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.656157] env[62627]: DEBUG oslo_vmware.api [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52ff5f3e-0709-360b-bf98-3dba5ea18bf0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.708919] env[62627]: DEBUG nova.compute.utils [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 756.711587] env[62627]: INFO nova.compute.claims [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 756.716842] env[62627]: DEBUG nova.compute.manager [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Allocating IP information in the background. {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 756.716842] env[62627]: DEBUG nova.network.neutron [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 756.718733] env[62627]: DEBUG nova.network.neutron [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 756.828634] env[62627]: DEBUG oslo_vmware.api [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194011, 'name': Rename_Task} progress is 99%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.835706] env[62627]: DEBUG oslo_vmware.api [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Task: {'id': task-2194012, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.087801} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.835996] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 756.837160] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7182d544-3a79-4d1d-aa7b-f6e8b2053ee1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.857924] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Reconfiguring VM instance instance-0000000d to attach disk [datastore1] 63e884c4-2cc1-431d-8a7f-1c4259b29e1a/63e884c4-2cc1-431d-8a7f-1c4259b29e1a.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 756.861167] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-68998252-0e89-45df-95f4-98a8ffb94518 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.892691] env[62627]: DEBUG oslo_vmware.api [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194013, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.894585] env[62627]: DEBUG oslo_vmware.api [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Waiting for the task: (returnval){ [ 756.894585] env[62627]: value = "task-2194014" [ 756.894585] env[62627]: _type = "Task" [ 756.894585] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.905386] env[62627]: DEBUG oslo_vmware.api [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Task: {'id': task-2194014, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.932351] env[62627]: DEBUG nova.compute.manager [req-102d9e9d-1392-4a40-a565-5cb3e9457b0a req-8ddf5f00-a316-4721-860e-dac2f0042da2 service nova] [instance: f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7] Received event network-vif-plugged-6de87f0c-6ba0-47bb-8c42-aa150e7855fe {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 756.932990] env[62627]: DEBUG oslo_concurrency.lockutils [req-102d9e9d-1392-4a40-a565-5cb3e9457b0a req-8ddf5f00-a316-4721-860e-dac2f0042da2 service nova] Acquiring lock "f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 756.932990] env[62627]: DEBUG oslo_concurrency.lockutils [req-102d9e9d-1392-4a40-a565-5cb3e9457b0a req-8ddf5f00-a316-4721-860e-dac2f0042da2 service nova] Lock "f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 756.933390] env[62627]: DEBUG oslo_concurrency.lockutils [req-102d9e9d-1392-4a40-a565-5cb3e9457b0a req-8ddf5f00-a316-4721-860e-dac2f0042da2 service nova] Lock "f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 756.933601] env[62627]: DEBUG nova.compute.manager [req-102d9e9d-1392-4a40-a565-5cb3e9457b0a req-8ddf5f00-a316-4721-860e-dac2f0042da2 service nova] [instance: f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7] No waiting events found dispatching network-vif-plugged-6de87f0c-6ba0-47bb-8c42-aa150e7855fe {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 756.933773] env[62627]: WARNING nova.compute.manager [req-102d9e9d-1392-4a40-a565-5cb3e9457b0a req-8ddf5f00-a316-4721-860e-dac2f0042da2 service nova] [instance: f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7] Received unexpected event network-vif-plugged-6de87f0c-6ba0-47bb-8c42-aa150e7855fe for instance with vm_state building and task_state spawning. [ 756.933988] env[62627]: DEBUG nova.compute.manager [req-102d9e9d-1392-4a40-a565-5cb3e9457b0a req-8ddf5f00-a316-4721-860e-dac2f0042da2 service nova] [instance: f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7] Received event network-changed-6de87f0c-6ba0-47bb-8c42-aa150e7855fe {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 756.934230] env[62627]: DEBUG nova.compute.manager [req-102d9e9d-1392-4a40-a565-5cb3e9457b0a req-8ddf5f00-a316-4721-860e-dac2f0042da2 service nova] [instance: f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7] Refreshing instance network info cache due to event network-changed-6de87f0c-6ba0-47bb-8c42-aa150e7855fe. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 756.934457] env[62627]: DEBUG oslo_concurrency.lockutils [req-102d9e9d-1392-4a40-a565-5cb3e9457b0a req-8ddf5f00-a316-4721-860e-dac2f0042da2 service nova] Acquiring lock "refresh_cache-f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 757.024258] env[62627]: DEBUG nova.policy [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ba14552786e54a559118919720c8b61e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd97a4a01696d4c4884df85a475c4bb7b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 757.032583] env[62627]: DEBUG nova.network.neutron [req-77b83800-cb6f-4bed-ae0b-6dc72ea17942 req-d9627f24-db52-4486-91b4-8d710fad2b26 service nova] [instance: 7691c2e2-4e43-4990-bf4d-2ef3298a4264] Updated VIF entry in instance network info cache for port abec21e1-204b-4ed0-801a-ab9f9bb8fcb9. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 757.032583] env[62627]: DEBUG nova.network.neutron [req-77b83800-cb6f-4bed-ae0b-6dc72ea17942 req-d9627f24-db52-4486-91b4-8d710fad2b26 service nova] [instance: 7691c2e2-4e43-4990-bf4d-2ef3298a4264] Updating instance_info_cache with network_info: [{"id": "abec21e1-204b-4ed0-801a-ab9f9bb8fcb9", "address": "fa:16:3e:88:23:61", "network": {"id": "227e798c-693c-4b8a-ad80-f419e26c7366", "bridge": "br-int", "label": "tempest-ServersTestJSON-1035789165-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9bd77b83b3af479d9713f405098112ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1cbd5e0e-9116-46f1-9748-13a73d2d7e75", "external-id": "nsx-vlan-transportzone-690", "segmentation_id": 690, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapabec21e1-20", "ovs_interfaceid": "abec21e1-204b-4ed0-801a-ab9f9bb8fcb9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 757.158342] env[62627]: DEBUG oslo_vmware.api [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52ff5f3e-0709-360b-bf98-3dba5ea18bf0, 'name': SearchDatastore_Task, 'duration_secs': 0.01087} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.159168] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d283b3f-30f6-4aab-820b-647d300fd4c8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.166090] env[62627]: DEBUG oslo_vmware.api [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Waiting for the task: (returnval){ [ 757.166090] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52220d0b-82ee-7719-c834-97f0d2aa74f3" [ 757.166090] env[62627]: _type = "Task" [ 757.166090] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.179050] env[62627]: DEBUG oslo_vmware.api [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52220d0b-82ee-7719-c834-97f0d2aa74f3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.216636] env[62627]: DEBUG nova.compute.manager [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 757.222387] env[62627]: INFO nova.compute.resource_tracker [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Updating resource usage from migration ee86539f-a2cf-4a4f-a1a2-80c788c57dd2 [ 757.309548] env[62627]: DEBUG oslo_vmware.api [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194011, 'name': Rename_Task} progress is 99%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.326645] env[62627]: DEBUG nova.network.neutron [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7] Updating instance_info_cache with network_info: [{"id": "6de87f0c-6ba0-47bb-8c42-aa150e7855fe", "address": "fa:16:3e:c6:6c:49", "network": {"id": "bd876f46-2bdf-4196-b718-c36f7e15f150", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.98", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ccb0cafa1a904d578ec4998b386bebf7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae1d6a8-cbba-4bbf-af10-ba5467340475", "external-id": "nsx-vlan-transportzone-271", "segmentation_id": 271, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6de87f0c-6b", "ovs_interfaceid": "6de87f0c-6ba0-47bb-8c42-aa150e7855fe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 757.367374] env[62627]: DEBUG oslo_vmware.api [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194013, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.411429] env[62627]: DEBUG oslo_vmware.api [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Task: {'id': task-2194014, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.535573] env[62627]: DEBUG oslo_concurrency.lockutils [req-77b83800-cb6f-4bed-ae0b-6dc72ea17942 req-d9627f24-db52-4486-91b4-8d710fad2b26 service nova] Releasing lock "refresh_cache-7691c2e2-4e43-4990-bf4d-2ef3298a4264" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 757.596313] env[62627]: DEBUG nova.network.neutron [req-f9243084-7992-4295-a442-e6781426d947 req-e5fca34e-ce17-4ebd-ac3c-81ab98ec9af0 service nova] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Updated VIF entry in instance network info cache for port 32dda461-7395-4104-9323-4a39535875de. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 757.596638] env[62627]: DEBUG nova.network.neutron [req-f9243084-7992-4295-a442-e6781426d947 req-e5fca34e-ce17-4ebd-ac3c-81ab98ec9af0 service nova] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Updating instance_info_cache with network_info: [{"id": "32dda461-7395-4104-9323-4a39535875de", "address": "fa:16:3e:95:80:24", "network": {"id": "8cfc2882-69b0-4e2d-afd7-6c9f1d02ac6d", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-150676617-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.187", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65ae6a3964c44bcbae72d15466f6467e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2321dbbe-f64a-4253-a462-21676f8a278e", "external-id": "nsx-vlan-transportzone-714", "segmentation_id": 714, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32dda461-73", "ovs_interfaceid": "32dda461-7395-4104-9323-4a39535875de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 757.691081] env[62627]: DEBUG oslo_vmware.api [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52220d0b-82ee-7719-c834-97f0d2aa74f3, 'name': SearchDatastore_Task, 'duration_secs': 0.01168} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.696271] env[62627]: DEBUG oslo_concurrency.lockutils [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 757.697497] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 7691c2e2-4e43-4990-bf4d-2ef3298a4264/7691c2e2-4e43-4990-bf4d-2ef3298a4264.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 757.697497] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1bca5547-ffb9-4f6f-816e-0c3ca039d51b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.716203] env[62627]: DEBUG oslo_vmware.api [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Waiting for the task: (returnval){ [ 757.716203] env[62627]: value = "task-2194015" [ 757.716203] env[62627]: _type = "Task" [ 757.716203] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.737915] env[62627]: DEBUG oslo_vmware.api [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Task: {'id': task-2194015, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.809258] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6f92021-cb5b-4d15-960a-bc67c08b15f2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.822486] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd1a7d74-4a1d-4a51-b1a4-c2048b5b9483 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.827507] env[62627]: DEBUG oslo_vmware.api [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194011, 'name': Rename_Task, 'duration_secs': 1.161675} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.827507] env[62627]: DEBUG oslo_concurrency.lockutils [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Releasing lock "refresh_cache-f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 757.827858] env[62627]: DEBUG nova.compute.manager [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7] Instance network_info: |[{"id": "6de87f0c-6ba0-47bb-8c42-aa150e7855fe", "address": "fa:16:3e:c6:6c:49", "network": {"id": "bd876f46-2bdf-4196-b718-c36f7e15f150", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.98", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ccb0cafa1a904d578ec4998b386bebf7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae1d6a8-cbba-4bbf-af10-ba5467340475", "external-id": "nsx-vlan-transportzone-271", "segmentation_id": 271, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6de87f0c-6b", "ovs_interfaceid": "6de87f0c-6ba0-47bb-8c42-aa150e7855fe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 757.828151] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 33bf3932-3746-445b-b0fe-611466cdd9b1] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 757.828996] env[62627]: DEBUG oslo_concurrency.lockutils [req-102d9e9d-1392-4a40-a565-5cb3e9457b0a req-8ddf5f00-a316-4721-860e-dac2f0042da2 service nova] Acquired lock "refresh_cache-f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 757.829284] env[62627]: DEBUG nova.network.neutron [req-102d9e9d-1392-4a40-a565-5cb3e9457b0a req-8ddf5f00-a316-4721-860e-dac2f0042da2 service nova] [instance: f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7] Refreshing network info cache for port 6de87f0c-6ba0-47bb-8c42-aa150e7855fe {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 757.830807] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c6:6c:49', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cae1d6a8-cbba-4bbf-af10-ba5467340475', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 
'iface_id': '6de87f0c-6ba0-47bb-8c42-aa150e7855fe', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 757.839462] env[62627]: DEBUG oslo.service.loopingcall [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 757.839781] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-373fc67c-c5d8-4af8-9183-08dd9d4a9480 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.872445] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 757.873198] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1f6c6b21-ca4c-4fb5-8425-cca2b62ac9ae {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.897481] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09631760-f7f5-4ef4-81c2-eb08bbc77f7a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.901785] env[62627]: DEBUG oslo_vmware.api [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Waiting for the task: (returnval){ [ 757.901785] env[62627]: value = "task-2194016" [ 757.901785] env[62627]: _type = "Task" [ 757.901785] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.903850] env[62627]: DEBUG nova.network.neutron [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Successfully created port: e56ec5b3-09ba-4f3a-9238-ed7d6c1f7991 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 757.922459] env[62627]: DEBUG oslo_vmware.api [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194013, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.922695] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 757.922695] env[62627]: value = "task-2194017" [ 757.922695] env[62627]: _type = "Task" [ 757.922695] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.933846] env[62627]: DEBUG oslo_vmware.api [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Task: {'id': task-2194014, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.937547] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a2a399e-ec60-4bef-a19a-41f6fa1d70f0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.946547] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194017, 'name': CreateVM_Task} progress is 6%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.950621] env[62627]: DEBUG oslo_vmware.api [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194016, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.961283] env[62627]: DEBUG nova.compute.provider_tree [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 758.107584] env[62627]: DEBUG oslo_concurrency.lockutils [req-f9243084-7992-4295-a442-e6781426d947 req-e5fca34e-ce17-4ebd-ac3c-81ab98ec9af0 service nova] Releasing lock "refresh_cache-db8ddb7b-b119-415c-98d9-dc5b3d2868b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 758.230330] env[62627]: DEBUG nova.compute.manager [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Start spawning the instance on the hypervisor. {{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 758.235476] env[62627]: DEBUG oslo_vmware.api [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Task: {'id': task-2194015, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.280115] env[62627]: DEBUG nova.virt.hardware [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 758.280115] env[62627]: DEBUG nova.virt.hardware [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 758.280115] env[62627]: DEBUG nova.virt.hardware [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 758.280375] env[62627]: DEBUG nova.virt.hardware [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 758.280375] env[62627]: DEBUG nova.virt.hardware [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 758.280744] env[62627]: DEBUG nova.virt.hardware [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 758.281685] env[62627]: DEBUG nova.virt.hardware [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 758.281685] env[62627]: DEBUG nova.virt.hardware [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
758.281685] env[62627]: DEBUG nova.virt.hardware [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 758.281685] env[62627]: DEBUG nova.virt.hardware [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 758.282075] env[62627]: DEBUG nova.virt.hardware [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 758.282864] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14a24494-5e5c-4d64-a148-7a05a68cf5d6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.292410] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23ea3fdb-9bc7-4e86-973f-cf10a3035c13 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.343422] env[62627]: DEBUG oslo_concurrency.lockutils [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Acquiring lock "aa181094-cb78-44e0-8d7a-1d677e0af1fe" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 758.343422] env[62627]: DEBUG oslo_concurrency.lockutils [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Lock "aa181094-cb78-44e0-8d7a-1d677e0af1fe" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 758.387082] env[62627]: DEBUG oslo_vmware.api [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194013, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.570445} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.387615] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore1] e8c87637-4a30-4482-af21-f58ad8a218fc/e8c87637-4a30-4482-af21-f58ad8a218fc.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 758.387947] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: e8c87637-4a30-4482-af21-f58ad8a218fc] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 758.388291] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bc1b744c-c0cb-4ad9-9fdd-4eaf597d3e3a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.398941] env[62627]: DEBUG oslo_vmware.api [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Waiting for the task: (returnval){ [ 758.398941] env[62627]: value = "task-2194018" [ 758.398941] env[62627]: _type = "Task" [ 758.398941] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.422736] env[62627]: DEBUG oslo_vmware.api [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194018, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.423111] env[62627]: DEBUG oslo_vmware.api [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Task: {'id': task-2194014, 'name': ReconfigVM_Task, 'duration_secs': 1.333024} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.424109] env[62627]: DEBUG nova.network.neutron [req-102d9e9d-1392-4a40-a565-5cb3e9457b0a req-8ddf5f00-a316-4721-860e-dac2f0042da2 service nova] [instance: f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7] Updated VIF entry in instance network info cache for port 6de87f0c-6ba0-47bb-8c42-aa150e7855fe. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 758.424874] env[62627]: DEBUG nova.network.neutron [req-102d9e9d-1392-4a40-a565-5cb3e9457b0a req-8ddf5f00-a316-4721-860e-dac2f0042da2 service nova] [instance: f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7] Updating instance_info_cache with network_info: [{"id": "6de87f0c-6ba0-47bb-8c42-aa150e7855fe", "address": "fa:16:3e:c6:6c:49", "network": {"id": "bd876f46-2bdf-4196-b718-c36f7e15f150", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.98", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ccb0cafa1a904d578ec4998b386bebf7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae1d6a8-cbba-4bbf-af10-ba5467340475", "external-id": "nsx-vlan-transportzone-271", "segmentation_id": 271, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6de87f0c-6b", "ovs_interfaceid": "6de87f0c-6ba0-47bb-8c42-aa150e7855fe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 758.436535] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Reconfigured VM instance instance-0000000d to attach disk [datastore1] 63e884c4-2cc1-431d-8a7f-1c4259b29e1a/63e884c4-2cc1-431d-8a7f-1c4259b29e1a.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 758.437571] env[62627]: DEBUG oslo_vmware.api [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194016, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.437777] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8ed21ccd-0582-455f-b142-329eb085187b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.456140] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194017, 'name': CreateVM_Task} progress is 25%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.458070] env[62627]: DEBUG oslo_vmware.api [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Waiting for the task: (returnval){ [ 758.458070] env[62627]: value = "task-2194019" [ 758.458070] env[62627]: _type = "Task" [ 758.458070] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.474510] env[62627]: DEBUG nova.scheduler.client.report [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 758.479641] env[62627]: DEBUG oslo_vmware.api [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Task: {'id': task-2194019, 'name': Rename_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.728749] env[62627]: DEBUG oslo_vmware.api [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Task: {'id': task-2194015, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.737667} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.729033] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 7691c2e2-4e43-4990-bf4d-2ef3298a4264/7691c2e2-4e43-4990-bf4d-2ef3298a4264.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 758.729248] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] [instance: 7691c2e2-4e43-4990-bf4d-2ef3298a4264] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 758.729514] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9f6ccdc8-3839-4b2e-9ddb-dab253e6bf83 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.740031] env[62627]: DEBUG oslo_vmware.api [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Waiting for the task: (returnval){ [ 758.740031] env[62627]: value = "task-2194020" [ 758.740031] env[62627]: _type = "Task" [ 758.740031] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.746881] env[62627]: DEBUG oslo_vmware.api [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Task: {'id': task-2194020, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.910086] env[62627]: DEBUG oslo_vmware.api [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194018, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.101973} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.910356] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: e8c87637-4a30-4482-af21-f58ad8a218fc] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 758.911166] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17ebbe9f-e25f-416f-b5bc-66cb597df4cb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.922215] env[62627]: DEBUG oslo_vmware.api [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194016, 'name': PowerOnVM_Task, 'duration_secs': 0.72469} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.932396] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 33bf3932-3746-445b-b0fe-611466cdd9b1] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 758.932396] env[62627]: INFO nova.compute.manager [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 33bf3932-3746-445b-b0fe-611466cdd9b1] Took 16.18 seconds to spawn the instance on the hypervisor. 
[ 758.932396] env[62627]: DEBUG nova.compute.manager [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 33bf3932-3746-445b-b0fe-611466cdd9b1] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 758.940828] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: e8c87637-4a30-4482-af21-f58ad8a218fc] Reconfiguring VM instance instance-0000000c to attach disk [datastore1] e8c87637-4a30-4482-af21-f58ad8a218fc/e8c87637-4a30-4482-af21-f58ad8a218fc.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 758.941829] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2877ed84-97f8-435f-b72f-c72b575429c4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.944615] env[62627]: DEBUG oslo_concurrency.lockutils [req-102d9e9d-1392-4a40-a565-5cb3e9457b0a req-8ddf5f00-a316-4721-860e-dac2f0042da2 service nova] Releasing lock "refresh_cache-f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 758.945071] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-87b2fd03-002f-4cf9-af6d-f1ca0c210041 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.977035] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194017, 'name': CreateVM_Task, 'duration_secs': 0.784131} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.978823] env[62627]: DEBUG oslo_concurrency.lockutils [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.779s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 758.979019] env[62627]: INFO nova.compute.manager [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Migrating [ 758.979241] env[62627]: DEBUG oslo_concurrency.lockutils [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Acquiring lock "compute-rpcapi-router" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 758.979384] env[62627]: DEBUG oslo_concurrency.lockutils [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Acquired lock "compute-rpcapi-router" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 758.980409] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 758.980761] env[62627]: DEBUG oslo_vmware.api [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Waiting for the task: (returnval){ [ 758.980761] env[62627]: value = "task-2194021" [ 758.980761] env[62627]: _type = "Task" [ 758.980761] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.984840] env[62627]: DEBUG oslo_concurrency.lockutils [None req-eb814016-dead-4c7a-bc89-ac22a708eba5 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.081s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 758.985133] env[62627]: DEBUG nova.objects.instance [None req-eb814016-dead-4c7a-bc89-ac22a708eba5 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Lazy-loading 'resources' on Instance uuid f15982c3-23ed-4f25-bf55-3418cac117cc {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 758.987681] env[62627]: DEBUG oslo_concurrency.lockutils [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 758.987681] env[62627]: DEBUG oslo_concurrency.lockutils [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 758.987681] env[62627]: DEBUG oslo_concurrency.lockutils [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 758.987993] env[62627]: DEBUG oslo_vmware.api [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Task: {'id': task-2194019, 'name': Rename_Task, 'duration_secs': 0.185104} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.988417] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-326f626f-57b3-4c87-9ff2-b84c8c4733a8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.993791] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 758.995731] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d0c58d2f-0400-4d57-8c98-beff92cff904 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.001527] env[62627]: DEBUG oslo_vmware.api [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Waiting for the task: (returnval){ [ 759.001527] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52cab492-d870-42bc-bc6c-a7719129fdac" [ 759.001527] env[62627]: _type = "Task" [ 759.001527] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.005087] env[62627]: DEBUG oslo_vmware.api [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194021, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.009325] env[62627]: DEBUG oslo_vmware.api [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Waiting for the task: (returnval){ [ 759.009325] env[62627]: value = "task-2194022" [ 759.009325] env[62627]: _type = "Task" [ 759.009325] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.016349] env[62627]: DEBUG oslo_vmware.api [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52cab492-d870-42bc-bc6c-a7719129fdac, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.022278] env[62627]: DEBUG oslo_vmware.api [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Task: {'id': task-2194022, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.250578] env[62627]: DEBUG oslo_vmware.api [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Task: {'id': task-2194020, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071944} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.250870] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] [instance: 7691c2e2-4e43-4990-bf4d-2ef3298a4264] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 759.251843] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-398096c7-750f-4994-8547-261c38814d36 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.287449] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] [instance: 7691c2e2-4e43-4990-bf4d-2ef3298a4264] Reconfiguring VM instance instance-0000000e to attach disk [datastore2] 7691c2e2-4e43-4990-bf4d-2ef3298a4264/7691c2e2-4e43-4990-bf4d-2ef3298a4264.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 759.287449] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6410a64e-84c0-4be1-ac7c-77d01fd7878f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.308320] env[62627]: DEBUG oslo_vmware.api [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Waiting for the task: (returnval){ [ 759.308320] env[62627]: value = "task-2194023" [ 759.308320] env[62627]: _type = "Task" [ 759.308320] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.317614] env[62627]: DEBUG oslo_vmware.api [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Task: {'id': task-2194023, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.491440] env[62627]: INFO nova.compute.manager [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 33bf3932-3746-445b-b0fe-611466cdd9b1] Took 30.14 seconds to build instance. [ 759.493312] env[62627]: INFO nova.compute.rpcapi [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Automatically selected compute RPC version 6.4 from minimum service version 68 [ 759.494434] env[62627]: DEBUG oslo_concurrency.lockutils [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Releasing lock "compute-rpcapi-router" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 759.530873] env[62627]: DEBUG oslo_vmware.api [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194021, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.537272] env[62627]: DEBUG oslo_vmware.api [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52cab492-d870-42bc-bc6c-a7719129fdac, 'name': SearchDatastore_Task, 'duration_secs': 0.029024} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.541196] env[62627]: DEBUG oslo_concurrency.lockutils [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 759.541640] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 759.541640] env[62627]: DEBUG oslo_concurrency.lockutils [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 759.541742] env[62627]: DEBUG oslo_concurrency.lockutils [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 759.541933] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 759.542716] env[62627]: DEBUG oslo_vmware.api [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Task: {'id': task-2194022, 'name': PowerOnVM_Task, 'duration_secs': 0.522235} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.545646] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3fe760f1-e7bd-455c-912d-56b46778ee37 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.548052] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 759.548264] env[62627]: INFO nova.compute.manager [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Took 11.51 seconds to spawn the instance on the hypervisor. [ 759.548490] env[62627]: DEBUG nova.compute.manager [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 759.550490] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad3aec92-9ddb-42f0-a395-138cf3da9ca7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.568952] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 759.569224] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 759.571806] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-819c4a72-424b-470c-99b4-80cdd2eaf068 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.578553] env[62627]: DEBUG oslo_vmware.api [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Waiting for the task: (returnval){ [ 759.578553] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52adb91a-ce02-b04c-25e3-07dfe4bc39ab" [ 759.578553] env[62627]: _type = "Task" [ 759.578553] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.590741] env[62627]: DEBUG oslo_vmware.api [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52adb91a-ce02-b04c-25e3-07dfe4bc39ab, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.822173] env[62627]: DEBUG oslo_vmware.api [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Task: {'id': task-2194023, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.004270] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Acquiring lock "cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 760.004524] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Lock "cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 760.004827] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Lock "33bf3932-3746-445b-b0fe-611466cdd9b1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.062s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 760.009782] env[62627]: DEBUG oslo_vmware.api [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194021, 'name': ReconfigVM_Task, 'duration_secs': 0.847774} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.010371] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: e8c87637-4a30-4482-af21-f58ad8a218fc] Reconfigured VM instance instance-0000000c to attach disk [datastore1] e8c87637-4a30-4482-af21-f58ad8a218fc/e8c87637-4a30-4482-af21-f58ad8a218fc.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 760.011530] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-50e2c1e0-2712-4684-9d81-1d155a04b4b1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.019981] env[62627]: DEBUG oslo_concurrency.lockutils [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Acquiring lock "refresh_cache-ab999825-4310-4fd8-81f0-6bb5dde4175b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 760.020202] env[62627]: DEBUG oslo_concurrency.lockutils [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Acquired lock "refresh_cache-ab999825-4310-4fd8-81f0-6bb5dde4175b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 760.020598] env[62627]: DEBUG nova.network.neutron [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 760.022171] env[62627]: DEBUG oslo_vmware.api [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Waiting for the task: (returnval){ [ 760.022171] env[62627]: value = "task-2194024" [ 760.022171] env[62627]: _type = "Task" [ 760.022171] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.038335] env[62627]: DEBUG oslo_vmware.api [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194024, 'name': Rename_Task} progress is 10%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.075810] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a0dd4dc-e0f6-4665-9377-278872646cf9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.081104] env[62627]: INFO nova.compute.manager [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Took 27.48 seconds to build instance. 
[ 760.095524] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff02aff1-4e75-4921-a0c8-736f6d8d4da8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.098533] env[62627]: DEBUG oslo_vmware.api [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52adb91a-ce02-b04c-25e3-07dfe4bc39ab, 'name': SearchDatastore_Task, 'duration_secs': 0.012773} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.099656] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-294cd79b-919e-497e-939a-2887b64843be {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.132295] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44638729-8ffe-4a02-b09e-db7a409782e9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.135275] env[62627]: DEBUG oslo_vmware.api [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Waiting for the task: (returnval){ [ 760.135275] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]523b5b12-41b8-616b-6ee1-59306f572ea5" [ 760.135275] env[62627]: _type = "Task" [ 760.135275] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.142711] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e4234b7-e113-4150-a40a-c650828d8fe0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.151313] env[62627]: DEBUG oslo_vmware.api [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]523b5b12-41b8-616b-6ee1-59306f572ea5, 'name': SearchDatastore_Task, 'duration_secs': 0.013489} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.151941] env[62627]: DEBUG oslo_concurrency.lockutils [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 760.152234] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7/f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 760.152489] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-95b2a3c1-9517-47f3-a5d3-db7730d4d735 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.163332] env[62627]: DEBUG nova.compute.provider_tree [None req-eb814016-dead-4c7a-bc89-ac22a708eba5 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 760.171523] env[62627]: DEBUG oslo_vmware.api [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Waiting for the task: (returnval){ [ 760.171523] env[62627]: value = "task-2194025" [ 760.171523] env[62627]: _type = "Task" [ 760.171523] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.182603] env[62627]: DEBUG oslo_vmware.api [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Task: {'id': task-2194025, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.321425] env[62627]: DEBUG oslo_vmware.api [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Task: {'id': task-2194023, 'name': ReconfigVM_Task, 'duration_secs': 0.547301} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.321700] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] [instance: 7691c2e2-4e43-4990-bf4d-2ef3298a4264] Reconfigured VM instance instance-0000000e to attach disk [datastore2] 7691c2e2-4e43-4990-bf4d-2ef3298a4264/7691c2e2-4e43-4990-bf4d-2ef3298a4264.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 760.322504] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a1da1ce8-4226-463d-860c-5742cbf2343b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.331032] env[62627]: DEBUG oslo_vmware.api [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Waiting for the task: (returnval){ [ 760.331032] env[62627]: value = "task-2194026" [ 760.331032] env[62627]: _type = "Task" [ 760.331032] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.346153] env[62627]: DEBUG oslo_vmware.api [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Task: {'id': task-2194026, 'name': Rename_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.366169] env[62627]: DEBUG nova.network.neutron [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Successfully updated port: e56ec5b3-09ba-4f3a-9238-ed7d6c1f7991 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 760.507949] env[62627]: DEBUG nova.compute.manager [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 760.539164] env[62627]: DEBUG oslo_vmware.api [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194024, 'name': Rename_Task, 'duration_secs': 0.152182} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.539464] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: e8c87637-4a30-4482-af21-f58ad8a218fc] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 760.539703] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6f905d6f-2ea6-495c-8221-8b8bcb6c8484 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.548160] env[62627]: DEBUG oslo_vmware.api [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Waiting for the task: (returnval){ [ 760.548160] env[62627]: value = "task-2194027" [ 760.548160] env[62627]: _type = "Task" [ 760.548160] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.559116] env[62627]: DEBUG oslo_vmware.api [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194027, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.586728] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7417376b-fe81-4e01-bd4f-141d2f29195d tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Lock "63e884c4-2cc1-431d-8a7f-1c4259b29e1a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.212s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 760.663205] env[62627]: DEBUG nova.compute.manager [req-fb7c8767-5cf0-4231-bde9-703469003868 req-0779fc4c-a63a-4c14-b79c-6e032755697f service nova] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Received event network-vif-plugged-e56ec5b3-09ba-4f3a-9238-ed7d6c1f7991 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 760.663489] env[62627]: DEBUG oslo_concurrency.lockutils [req-fb7c8767-5cf0-4231-bde9-703469003868 req-0779fc4c-a63a-4c14-b79c-6e032755697f service nova] Acquiring lock "a397bce6-30c7-485d-81ab-88a8e021996c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 760.664032] env[62627]: DEBUG oslo_concurrency.lockutils [req-fb7c8767-5cf0-4231-bde9-703469003868 req-0779fc4c-a63a-4c14-b79c-6e032755697f service nova] Lock "a397bce6-30c7-485d-81ab-88a8e021996c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 760.664249] env[62627]: DEBUG oslo_concurrency.lockutils [req-fb7c8767-5cf0-4231-bde9-703469003868 req-0779fc4c-a63a-4c14-b79c-6e032755697f service nova] Lock "a397bce6-30c7-485d-81ab-88a8e021996c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62627) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 760.664435] env[62627]: DEBUG nova.compute.manager [req-fb7c8767-5cf0-4231-bde9-703469003868 req-0779fc4c-a63a-4c14-b79c-6e032755697f service nova] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] No waiting events found dispatching network-vif-plugged-e56ec5b3-09ba-4f3a-9238-ed7d6c1f7991 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 760.664645] env[62627]: WARNING nova.compute.manager [req-fb7c8767-5cf0-4231-bde9-703469003868 req-0779fc4c-a63a-4c14-b79c-6e032755697f service nova] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Received unexpected event network-vif-plugged-e56ec5b3-09ba-4f3a-9238-ed7d6c1f7991 for instance with vm_state building and task_state spawning. [ 760.666258] env[62627]: DEBUG nova.scheduler.client.report [None req-eb814016-dead-4c7a-bc89-ac22a708eba5 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 760.684854] env[62627]: DEBUG oslo_vmware.api [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Task: {'id': task-2194025, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.842610] env[62627]: DEBUG oslo_vmware.api [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Task: {'id': task-2194026, 'name': Rename_Task, 'duration_secs': 0.160635} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.843228] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] [instance: 7691c2e2-4e43-4990-bf4d-2ef3298a4264] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 760.843499] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-17d5bf24-470d-4c09-94fb-862d43e76556 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.856384] env[62627]: DEBUG oslo_vmware.api [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Waiting for the task: (returnval){ [ 760.856384] env[62627]: value = "task-2194028" [ 760.856384] env[62627]: _type = "Task" [ 760.856384] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.866787] env[62627]: DEBUG oslo_vmware.api [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Task: {'id': task-2194028, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.874556] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Acquiring lock "refresh_cache-a397bce6-30c7-485d-81ab-88a8e021996c" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 760.874945] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Acquired lock "refresh_cache-a397bce6-30c7-485d-81ab-88a8e021996c" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 760.874983] env[62627]: DEBUG nova.network.neutron [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 760.967267] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Acquiring lock "aed62160-2db5-4008-a491-2b27ae81ef5f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 760.967267] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Lock "aed62160-2db5-4008-a491-2b27ae81ef5f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 760.980457] env[62627]: DEBUG nova.network.neutron [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Updating instance_info_cache with network_info: [{"id": "a7d230b4-8ae5-4c90-8ccf-806a4056e883", "address": "fa:16:3e:4c:84:35", "network": {"id": "bd876f46-2bdf-4196-b718-c36f7e15f150", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.153", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ccb0cafa1a904d578ec4998b386bebf7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": 
true, "nsx-logical-switch-id": "cae1d6a8-cbba-4bbf-af10-ba5467340475", "external-id": "nsx-vlan-transportzone-271", "segmentation_id": 271, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7d230b4-8a", "ovs_interfaceid": "a7d230b4-8ae5-4c90-8ccf-806a4056e883", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 761.036200] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 761.063509] env[62627]: DEBUG oslo_vmware.api [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194027, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.092497] env[62627]: DEBUG nova.compute.manager [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 761.172794] env[62627]: DEBUG oslo_concurrency.lockutils [None req-eb814016-dead-4c7a-bc89-ac22a708eba5 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.188s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 761.175735] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.640s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 761.177874] env[62627]: INFO nova.compute.claims [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 761.194202] env[62627]: DEBUG oslo_vmware.api [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Task: {'id': task-2194025, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.685618} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.194539] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7/f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 761.194775] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 761.195298] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d3f9c184-48c5-461b-981a-00daf06a84c2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.203830] env[62627]: INFO nova.scheduler.client.report [None req-eb814016-dead-4c7a-bc89-ac22a708eba5 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Deleted allocations for instance f15982c3-23ed-4f25-bf55-3418cac117cc [ 761.209843] env[62627]: DEBUG oslo_vmware.api [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Waiting for the task: (returnval){ [ 761.209843] env[62627]: value = "task-2194029" [ 761.209843] env[62627]: _type = "Task" [ 761.209843] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.221991] env[62627]: DEBUG oslo_vmware.api [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Task: {'id': task-2194029, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.368155] env[62627]: DEBUG oslo_vmware.api [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Task: {'id': task-2194028, 'name': PowerOnVM_Task} progress is 1%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.441846] env[62627]: DEBUG nova.network.neutron [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Instance cache missing network info. 
{{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 761.443103] env[62627]: INFO nova.compute.manager [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Rebuilding instance [ 761.485343] env[62627]: DEBUG oslo_concurrency.lockutils [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Releasing lock "refresh_cache-ab999825-4310-4fd8-81f0-6bb5dde4175b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 761.512698] env[62627]: DEBUG nova.compute.manager [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 761.513628] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1eb4da8-de0d-41b0-ae6f-a22d570fd715 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.561510] env[62627]: DEBUG oslo_vmware.api [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194027, 'name': PowerOnVM_Task, 'duration_secs': 0.572507} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.561943] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: e8c87637-4a30-4482-af21-f58ad8a218fc] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 761.562298] env[62627]: INFO nova.compute.manager [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: e8c87637-4a30-4482-af21-f58ad8a218fc] Took 16.19 seconds to spawn the instance on the hypervisor. 
[ 761.562601] env[62627]: DEBUG nova.compute.manager [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: e8c87637-4a30-4482-af21-f58ad8a218fc] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 761.563595] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02b2ff85-38f8-4323-9cd5-e394dd04cc59 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.617702] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 761.714024] env[62627]: DEBUG nova.network.neutron [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Updating instance_info_cache with network_info: [{"id": "e56ec5b3-09ba-4f3a-9238-ed7d6c1f7991", "address": "fa:16:3e:f6:d8:65", "network": {"id": "d8e684f9-7aa5-4513-a4df-afb52c5cb804", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-671089707-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d97a4a01696d4c4884df85a475c4bb7b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d88bb07-f93c-45ca-bce7-230cb1f33833", "external-id": "nsx-vlan-transportzone-387", "segmentation_id": 387, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape56ec5b3-09", "ovs_interfaceid": "e56ec5b3-09ba-4f3a-9238-ed7d6c1f7991", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 761.717807] env[62627]: DEBUG oslo_concurrency.lockutils [None req-eb814016-dead-4c7a-bc89-ac22a708eba5 tempest-ServerDiagnosticsNegativeTest-1603042516 tempest-ServerDiagnosticsNegativeTest-1603042516-project-member] Lock "f15982c3-23ed-4f25-bf55-3418cac117cc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.846s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 761.736359] env[62627]: DEBUG oslo_vmware.api [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Task: {'id': task-2194029, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.086895} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.738262] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 761.740610] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a48fd7d-f735-4869-8347-6580d84efa3b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.773833] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7] Reconfiguring VM instance instance-0000000f to attach disk [datastore2] f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7/f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 761.774513] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-de068ab7-d5d5-4686-88c2-db32d7d55abe {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.799221] env[62627]: DEBUG oslo_vmware.api [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Waiting for the task: (returnval){ [ 761.799221] env[62627]: value = "task-2194030" [ 761.799221] env[62627]: _type = "Task" [ 761.799221] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.809878] env[62627]: DEBUG oslo_vmware.api [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Task: {'id': task-2194030, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.867885] env[62627]: DEBUG oslo_vmware.api [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Task: {'id': task-2194028, 'name': PowerOnVM_Task} progress is 64%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.087636] env[62627]: INFO nova.compute.manager [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: e8c87637-4a30-4482-af21-f58ad8a218fc] Took 31.16 seconds to build instance. 
[ 762.222139] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Releasing lock "refresh_cache-a397bce6-30c7-485d-81ab-88a8e021996c" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 762.222139] env[62627]: DEBUG nova.compute.manager [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Instance network_info: |[{"id": "e56ec5b3-09ba-4f3a-9238-ed7d6c1f7991", "address": "fa:16:3e:f6:d8:65", "network": {"id": "d8e684f9-7aa5-4513-a4df-afb52c5cb804", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-671089707-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d97a4a01696d4c4884df85a475c4bb7b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d88bb07-f93c-45ca-bce7-230cb1f33833", "external-id": "nsx-vlan-transportzone-387", "segmentation_id": 387, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape56ec5b3-09", "ovs_interfaceid": "e56ec5b3-09ba-4f3a-9238-ed7d6c1f7991", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 762.222500] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f6:d8:65', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2d88bb07-f93c-45ca-bce7-230cb1f33833', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e56ec5b3-09ba-4f3a-9238-ed7d6c1f7991', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 762.230124] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Creating folder: Project (d97a4a01696d4c4884df85a475c4bb7b). Parent ref: group-v447541. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 762.231522] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-91364ce3-961e-4b6c-8b9a-28da597a074a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.252274] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Created folder: Project (d97a4a01696d4c4884df85a475c4bb7b) in parent group-v447541. 
[ 762.252502] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Creating folder: Instances. Parent ref: group-v447585. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 762.253735] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-aa1fa632-7d5f-4ce9-8663-4b012eccbd44 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.271801] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Created folder: Instances in parent group-v447585. [ 762.271801] env[62627]: DEBUG oslo.service.loopingcall [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 762.271801] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 762.271801] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-43ceb045-2373-4bc2-9a91-8732bafdf45a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.296990] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 762.296990] env[62627]: value = "task-2194033" [ 762.296990] env[62627]: _type = "Task" [ 762.296990] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.315496] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194033, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.315496] env[62627]: DEBUG oslo_vmware.api [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Task: {'id': task-2194030, 'name': ReconfigVM_Task, 'duration_secs': 0.462808} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.318718] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7] Reconfigured VM instance instance-0000000f to attach disk [datastore2] f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7/f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 762.319405] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fa7dfa39-3d9e-40c2-aa10-c0a9a5752652 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.327922] env[62627]: DEBUG oslo_vmware.api [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Waiting for the task: (returnval){ [ 762.327922] env[62627]: value = "task-2194034" [ 762.327922] env[62627]: _type = "Task" [ 762.327922] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.343786] env[62627]: DEBUG oslo_vmware.api [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Task: {'id': task-2194034, 'name': Rename_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.371782] env[62627]: DEBUG oslo_vmware.api [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Task: {'id': task-2194028, 'name': PowerOnVM_Task, 'duration_secs': 1.329347} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.374771] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] [instance: 7691c2e2-4e43-4990-bf4d-2ef3298a4264] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 762.374987] env[62627]: INFO nova.compute.manager [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] [instance: 7691c2e2-4e43-4990-bf4d-2ef3298a4264] Took 11.70 seconds to spawn the instance on the hypervisor. 
[ 762.375186] env[62627]: DEBUG nova.compute.manager [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] [instance: 7691c2e2-4e43-4990-bf4d-2ef3298a4264] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 762.376436] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5592fb29-bc6b-4dc6-904f-f83d43369da1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.546069] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 762.546069] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-627ee6bc-69a5-4bc8-af4d-7665f04097e4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.557699] env[62627]: DEBUG oslo_vmware.api [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Waiting for the task: (returnval){ [ 762.557699] env[62627]: value = "task-2194035" [ 762.557699] env[62627]: _type = "Task" [ 762.557699] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.571845] env[62627]: DEBUG oslo_vmware.api [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Task: {'id': task-2194035, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.592655] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d2174986-6b02-4e71-b54e-db4b4098df8b tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Lock "e8c87637-4a30-4482-af21-f58ad8a218fc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.596s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 762.814165] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194033, 'name': CreateVM_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.843413] env[62627]: DEBUG oslo_vmware.api [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Task: {'id': task-2194034, 'name': Rename_Task, 'duration_secs': 0.372304} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.846378] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 762.847404] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fb5125f7-4fd7-44b7-93f9-232df6ee48fb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.855663] env[62627]: DEBUG oslo_vmware.api [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Waiting for the task: (returnval){ [ 762.855663] env[62627]: value = "task-2194036" [ 762.855663] env[62627]: _type = "Task" [ 762.855663] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.867831] env[62627]: DEBUG oslo_vmware.api [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Task: {'id': task-2194036, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.882711] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ef67ca4-cf35-4bef-ae47-8163e7b9fe72 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.897740] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82e7d5e4-7e7e-4693-9078-584f522ac943 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.902569] env[62627]: INFO nova.compute.manager [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] [instance: 7691c2e2-4e43-4990-bf4d-2ef3298a4264] Took 30.24 seconds to build instance. 
[ 762.933693] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d49c807f-9d27-40dd-8f74-b627c6ab3dd9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.942807] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fb98647-6a1e-42da-a434-0b8301ba767e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.958571] env[62627]: DEBUG nova.compute.provider_tree [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 763.002259] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e8a1f75-cf6a-446b-83a5-b3e87705f2bc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.027128] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Updating instance 'ab999825-4310-4fd8-81f0-6bb5dde4175b' progress to 0 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 763.068823] env[62627]: DEBUG oslo_vmware.api [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Task: {'id': task-2194035, 'name': PowerOffVM_Task, 'duration_secs': 0.172722} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.069154] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 763.070961] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 763.070961] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb00b43c-67d6-4747-a0ab-b1f3dbd59dbb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.079532] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 763.080146] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bb2df3e4-b491-48fa-acfd-55257d3d7cf4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.091622] env[62627]: DEBUG nova.compute.manager [req-b86e64ea-2dd4-427b-8137-25ed4a81c778 req-cd3257a0-8c73-4f03-b5a6-d520acd433ff service nova] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Received event network-changed-e56ec5b3-09ba-4f3a-9238-ed7d6c1f7991 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 763.091810] env[62627]: DEBUG nova.compute.manager [req-b86e64ea-2dd4-427b-8137-25ed4a81c778 req-cd3257a0-8c73-4f03-b5a6-d520acd433ff service nova] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Refreshing instance network info cache due to event network-changed-e56ec5b3-09ba-4f3a-9238-ed7d6c1f7991. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 763.092036] env[62627]: DEBUG oslo_concurrency.lockutils [req-b86e64ea-2dd4-427b-8137-25ed4a81c778 req-cd3257a0-8c73-4f03-b5a6-d520acd433ff service nova] Acquiring lock "refresh_cache-a397bce6-30c7-485d-81ab-88a8e021996c" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 763.092175] env[62627]: DEBUG oslo_concurrency.lockutils [req-b86e64ea-2dd4-427b-8137-25ed4a81c778 req-cd3257a0-8c73-4f03-b5a6-d520acd433ff service nova] Acquired lock "refresh_cache-a397bce6-30c7-485d-81ab-88a8e021996c" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 763.092330] env[62627]: DEBUG nova.network.neutron [req-b86e64ea-2dd4-427b-8137-25ed4a81c778 req-cd3257a0-8c73-4f03-b5a6-d520acd433ff service nova] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Refreshing network info cache for port e56ec5b3-09ba-4f3a-9238-ed7d6c1f7991 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 763.098813] env[62627]: DEBUG nova.compute.manager [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 763.112988] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 763.114547] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Deleting contents of the VM from datastore datastore1 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 763.117781] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Deleting the datastore file [datastore1] 63e884c4-2cc1-431d-8a7f-1c4259b29e1a {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 763.117781] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4630e692-b279-4733-a5d0-13cd9cd6383b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.126089] env[62627]: DEBUG oslo_vmware.api [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Waiting for the task: (returnval){ [ 763.126089] env[62627]: value = "task-2194038" [ 763.126089] env[62627]: _type = "Task" [ 763.126089] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.137783] env[62627]: DEBUG oslo_vmware.api [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Task: {'id': task-2194038, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.311379] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194033, 'name': CreateVM_Task, 'duration_secs': 0.51878} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.311379] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 763.311730] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 763.311995] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 763.312976] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 763.314064] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-deda43b5-4d0c-4e49-aeb5-37033c321444 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.324888] env[62627]: DEBUG oslo_vmware.api [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Waiting for the task: (returnval){ [ 763.324888] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52dab71d-9c4d-43c2-7a9f-84c0cdb32927" [ 763.324888] env[62627]: _type = "Task" [ 763.324888] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.342877] env[62627]: DEBUG oslo_vmware.api [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52dab71d-9c4d-43c2-7a9f-84c0cdb32927, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.367263] env[62627]: DEBUG oslo_vmware.api [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Task: {'id': task-2194036, 'name': PowerOnVM_Task} progress is 1%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.372451] env[62627]: DEBUG oslo_concurrency.lockutils [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Acquiring lock "32a3b8bb-7c3a-4e30-8a50-37ce53e71882" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 763.372748] env[62627]: DEBUG oslo_concurrency.lockutils [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Lock "32a3b8bb-7c3a-4e30-8a50-37ce53e71882" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 763.407132] env[62627]: DEBUG oslo_concurrency.lockutils [None req-839f990e-eb21-4784-91f6-d6aa2db4cf30 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Lock "7691c2e2-4e43-4990-bf4d-2ef3298a4264" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.992s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 763.464296] env[62627]: DEBUG nova.scheduler.client.report [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 763.540384] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 763.544707] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2cb60bc0-2b95-4362-a44a-7066909088b5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.558746] env[62627]: DEBUG oslo_vmware.api [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Waiting for the task: (returnval){ [ 763.558746] env[62627]: value = "task-2194039" [ 763.558746] 
env[62627]: _type = "Task" [ 763.558746] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.572930] env[62627]: DEBUG oslo_vmware.api [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2194039, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.625479] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 763.640291] env[62627]: DEBUG oslo_vmware.api [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Task: {'id': task-2194038, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.172899} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.640478] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 763.640579] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Deleted contents of the VM from datastore datastore1 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 763.640726] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 763.839135] env[62627]: DEBUG oslo_vmware.api [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52dab71d-9c4d-43c2-7a9f-84c0cdb32927, 'name': SearchDatastore_Task, 'duration_secs': 0.016828} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.839575] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 763.839903] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 763.840456] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 763.840651] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 763.840857] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 763.841328] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ed5ede9b-e36b-48ad-8b7e-e0c92e3eb53b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.853268] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 763.853490] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 763.854336] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-853763c5-8a19-4204-bfb5-2ca0e1309274 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.866283] env[62627]: DEBUG oslo_vmware.api [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Waiting for the task: (returnval){ [ 763.866283] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]523e1bc2-c38a-c426-6dce-327bc9ee93b2" [ 763.866283] env[62627]: _type = "Task" [ 763.866283] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.876903] env[62627]: DEBUG oslo_vmware.api [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Task: {'id': task-2194036, 'name': PowerOnVM_Task} progress is 64%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.884774] env[62627]: DEBUG oslo_vmware.api [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]523e1bc2-c38a-c426-6dce-327bc9ee93b2, 'name': SearchDatastore_Task, 'duration_secs': 0.013473} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.889453] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bd931e1a-0d76-494e-bb4a-d5da80aa3d4a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.898906] env[62627]: DEBUG oslo_vmware.api [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Waiting for the task: (returnval){ [ 763.898906] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]522611b2-3e96-00a0-45ce-3c45b2abf9f8" [ 763.898906] env[62627]: _type = "Task" [ 763.898906] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.910818] env[62627]: DEBUG nova.compute.manager [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 763.913759] env[62627]: DEBUG oslo_vmware.api [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]522611b2-3e96-00a0-45ce-3c45b2abf9f8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.969850] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.794s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 763.970230] env[62627]: DEBUG nova.compute.manager [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 763.974632] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.146s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 763.975771] env[62627]: INFO nova.compute.claims [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 15799515-ab7d-4e7c-a6e0-c740baf4f93a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 764.046237] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9238b777-b0d4-44d3-9d4b-954d997d6046 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Acquiring lock "e8c87637-4a30-4482-af21-f58ad8a218fc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 764.046237] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9238b777-b0d4-44d3-9d4b-954d997d6046 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Lock "e8c87637-4a30-4482-af21-f58ad8a218fc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 764.046429] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9238b777-b0d4-44d3-9d4b-954d997d6046 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Acquiring lock "e8c87637-4a30-4482-af21-f58ad8a218fc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 764.046753] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9238b777-b0d4-44d3-9d4b-954d997d6046 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Lock "e8c87637-4a30-4482-af21-f58ad8a218fc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 764.046853] env[62627]: 
DEBUG oslo_concurrency.lockutils [None req-9238b777-b0d4-44d3-9d4b-954d997d6046 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Lock "e8c87637-4a30-4482-af21-f58ad8a218fc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 764.049949] env[62627]: INFO nova.compute.manager [None req-9238b777-b0d4-44d3-9d4b-954d997d6046 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: e8c87637-4a30-4482-af21-f58ad8a218fc] Terminating instance [ 764.069828] env[62627]: DEBUG oslo_vmware.api [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2194039, 'name': PowerOffVM_Task, 'duration_secs': 0.307718} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.070142] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 764.070333] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Updating instance 'ab999825-4310-4fd8-81f0-6bb5dde4175b' progress to 17 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 764.296259] env[62627]: DEBUG nova.network.neutron [req-b86e64ea-2dd4-427b-8137-25ed4a81c778 req-cd3257a0-8c73-4f03-b5a6-d520acd433ff service nova] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Updated VIF entry in instance network info cache for port e56ec5b3-09ba-4f3a-9238-ed7d6c1f7991. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 764.296259] env[62627]: DEBUG nova.network.neutron [req-b86e64ea-2dd4-427b-8137-25ed4a81c778 req-cd3257a0-8c73-4f03-b5a6-d520acd433ff service nova] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Updating instance_info_cache with network_info: [{"id": "e56ec5b3-09ba-4f3a-9238-ed7d6c1f7991", "address": "fa:16:3e:f6:d8:65", "network": {"id": "d8e684f9-7aa5-4513-a4df-afb52c5cb804", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-671089707-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d97a4a01696d4c4884df85a475c4bb7b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d88bb07-f93c-45ca-bce7-230cb1f33833", "external-id": "nsx-vlan-transportzone-387", "segmentation_id": 387, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape56ec5b3-09", "ovs_interfaceid": "e56ec5b3-09ba-4f3a-9238-ed7d6c1f7991", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 764.369134] env[62627]: DEBUG oslo_vmware.api [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Task: {'id': task-2194036, 'name': PowerOnVM_Task, 'duration_secs': 1.345278} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.369422] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 764.369623] env[62627]: INFO nova.compute.manager [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7] Took 10.98 seconds to spawn the instance on the hypervisor. 
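[editorial note] Most of the surrounding entries trace one recurring pattern: the VMware driver invokes a vSphere *_Task method (CreateVM_Task, SearchDatastore_Task, PowerOnVM_Task, CopyVirtualDisk_Task, DeleteDatastoreFile_Task), gets back a task managed-object reference, and oslo.vmware's wait_for_task then polls that task until it finishes -- which is where the repeated "Waiting for the task: (returnval){ ... } to complete", "progress is N%", and "completed successfully ... duration_secs" lines come from. The following is a minimal sketch of that poll loop, not the oslo.vmware implementation: TaskInfo and the injected poll_task_info callable are hypothetical stand-ins for the TaskInfo property the driver reads over the vSphere API, and the 0.5 s interval is an assumed default, not the value configured for the run that produced this log.

    # Minimal sketch of the *_Task poll loop traced by the entries above.
    # Assumptions: TaskInfo / poll_task_info are hypothetical stand-ins for
    # the vSphere TaskInfo property; this is NOT the oslo.vmware code.
    import logging
    import time
    from dataclasses import dataclass
    from typing import Any, Callable, Optional

    LOG = logging.getLogger(__name__)


    @dataclass
    class TaskInfo:
        key: str                      # e.g. "task-2194040"
        state: str                    # "queued" | "running" | "success" | "error"
        progress: Optional[int] = None
        result: Any = None
        error: Optional[str] = None


    class TaskFailed(Exception):
        """The vSphere task ended in the 'error' state."""


    def wait_for_task(poll_task_info: Callable[[], TaskInfo],
                      poll_interval: float = 0.5) -> Any:
        """Poll a *_Task until it finishes, logging progress like the entries above."""
        start = time.monotonic()
        while True:
            info = poll_task_info()
            if info.state in ("queued", "running"):
                # Matches the "... progress is N%." debug lines.
                LOG.debug("Task: {'id': %s} progress is %s%%.",
                          info.key, info.progress or 0)
                time.sleep(poll_interval)
                continue
            duration = time.monotonic() - start
            if info.state == "success":
                # Matches the "... 'duration_secs': X} completed successfully." lines.
                LOG.debug("Task: {'id': %s, 'duration_secs': %.6f} completed successfully.",
                          info.key, duration)
                return info.result
            raise TaskFailed(f"{info.key} failed after {duration:.3f}s: {info.error}")

Under those assumptions, a callable that reads TaskInfo for task-2194036 would reproduce the PowerOnVM_Task sequence seen above (progress 1% -> 64% -> completed, duration_secs ~1.345): wait_for_task(poll) keeps sleeping and logging while the state is running, then returns the task result once the state flips to success.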
[ 764.369800] env[62627]: DEBUG nova.compute.manager [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 764.370610] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9f23a74-6bdc-408c-92c3-8da3013aaa9f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.410956] env[62627]: DEBUG oslo_vmware.api [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]522611b2-3e96-00a0-45ce-3c45b2abf9f8, 'name': SearchDatastore_Task, 'duration_secs': 0.01164} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.412027] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 764.412027] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] a397bce6-30c7-485d-81ab-88a8e021996c/a397bce6-30c7-485d-81ab-88a8e021996c.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 764.412027] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e0dd3baa-39ea-4b1f-808d-d438054a7188 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.423079] env[62627]: DEBUG oslo_vmware.api [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Waiting for the task: (returnval){ [ 764.423079] env[62627]: value = "task-2194040" [ 764.423079] env[62627]: _type = "Task" [ 764.423079] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.438320] env[62627]: DEBUG oslo_vmware.api [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Task: {'id': task-2194040, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.446399] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 764.476212] env[62627]: DEBUG nova.compute.utils [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 764.477749] env[62627]: DEBUG nova.compute.manager [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] Allocating IP information in the background. {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 764.477987] env[62627]: DEBUG nova.network.neutron [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 764.558027] env[62627]: DEBUG nova.compute.manager [None req-9238b777-b0d4-44d3-9d4b-954d997d6046 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: e8c87637-4a30-4482-af21-f58ad8a218fc] Start destroying the instance on the hypervisor. 
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 764.558361] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-9238b777-b0d4-44d3-9d4b-954d997d6046 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: e8c87637-4a30-4482-af21-f58ad8a218fc] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 764.559763] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa2c6d41-f79b-48f4-88d4-895489ea6f4b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.571883] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-9238b777-b0d4-44d3-9d4b-954d997d6046 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: e8c87637-4a30-4482-af21-f58ad8a218fc] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 764.572278] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2e9c60b9-7564-4764-a0a6-3ccc3ef43ef1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.578806] env[62627]: DEBUG nova.virt.hardware [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 764.578806] env[62627]: DEBUG nova.virt.hardware [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 764.578806] env[62627]: DEBUG nova.virt.hardware [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 764.578806] env[62627]: DEBUG nova.virt.hardware [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 764.579096] env[62627]: DEBUG nova.virt.hardware [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 764.579096] env[62627]: DEBUG nova.virt.hardware [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 
tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 764.579096] env[62627]: DEBUG nova.virt.hardware [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 764.579421] env[62627]: DEBUG nova.virt.hardware [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 764.579511] env[62627]: DEBUG nova.virt.hardware [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 764.579864] env[62627]: DEBUG nova.virt.hardware [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 764.580015] env[62627]: DEBUG nova.virt.hardware [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 764.588974] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-301f3873-d73f-4a5a-997f-3c49dcffb9cc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.610610] env[62627]: DEBUG nova.policy [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '61f771c92f944c26a4af4caf1a429325', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f831870fb0824e8e8353640736a11692', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 764.616648] env[62627]: DEBUG oslo_vmware.api [None req-9238b777-b0d4-44d3-9d4b-954d997d6046 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Waiting for the task: (returnval){ [ 764.616648] env[62627]: value = "task-2194041" [ 764.616648] env[62627]: _type = "Task" [ 764.616648] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.624610] env[62627]: DEBUG oslo_vmware.api [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Waiting for the task: (returnval){ [ 764.624610] env[62627]: value = "task-2194042" [ 764.624610] env[62627]: _type = "Task" [ 764.624610] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.636557] env[62627]: DEBUG oslo_vmware.api [None req-9238b777-b0d4-44d3-9d4b-954d997d6046 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194041, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.644073] env[62627]: DEBUG oslo_vmware.api [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2194042, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.685763] env[62627]: DEBUG nova.virt.hardware [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 764.686045] env[62627]: DEBUG nova.virt.hardware [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 764.686209] env[62627]: DEBUG nova.virt.hardware [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 764.686390] env[62627]: DEBUG nova.virt.hardware [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 764.686529] env[62627]: DEBUG nova.virt.hardware [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 
764.686691] env[62627]: DEBUG nova.virt.hardware [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 764.686898] env[62627]: DEBUG nova.virt.hardware [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 764.687387] env[62627]: DEBUG nova.virt.hardware [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 764.687631] env[62627]: DEBUG nova.virt.hardware [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 764.687806] env[62627]: DEBUG nova.virt.hardware [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 764.687978] env[62627]: DEBUG nova.virt.hardware [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 764.689079] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bf84c2e-7686-4f39-a4ca-241b1e8ed76a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.698829] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11260d85-2385-422e-ab91-9a1c096a7122 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.715533] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Instance VIF info [] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 764.721497] env[62627]: DEBUG oslo.service.loopingcall [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 764.721943] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 764.722213] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ed25adf5-f363-4632-ad7d-8692d7ff46ff {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.740381] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 764.740381] env[62627]: value = "task-2194043" [ 764.740381] env[62627]: _type = "Task" [ 764.740381] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.749661] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194043, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.799650] env[62627]: DEBUG oslo_concurrency.lockutils [req-b86e64ea-2dd4-427b-8137-25ed4a81c778 req-cd3257a0-8c73-4f03-b5a6-d520acd433ff service nova] Releasing lock "refresh_cache-a397bce6-30c7-485d-81ab-88a8e021996c" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 764.807578] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ac68c546-e40c-4953-9e5d-64eade1881d6 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Acquiring lock "33bf3932-3746-445b-b0fe-611466cdd9b1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 764.807994] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ac68c546-e40c-4953-9e5d-64eade1881d6 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Lock "33bf3932-3746-445b-b0fe-611466cdd9b1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 764.808288] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ac68c546-e40c-4953-9e5d-64eade1881d6 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Acquiring lock "33bf3932-3746-445b-b0fe-611466cdd9b1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 764.808588] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ac68c546-e40c-4953-9e5d-64eade1881d6 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Lock "33bf3932-3746-445b-b0fe-611466cdd9b1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 764.808914] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ac68c546-e40c-4953-9e5d-64eade1881d6 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Lock "33bf3932-3746-445b-b0fe-611466cdd9b1-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 764.812318] env[62627]: INFO nova.compute.manager [None req-ac68c546-e40c-4953-9e5d-64eade1881d6 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 33bf3932-3746-445b-b0fe-611466cdd9b1] Terminating instance [ 764.893422] env[62627]: INFO nova.compute.manager [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7] Took 30.04 seconds to build instance. [ 764.940727] env[62627]: DEBUG oslo_vmware.api [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Task: {'id': task-2194040, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.982048] env[62627]: DEBUG nova.compute.manager [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 765.127730] env[62627]: DEBUG oslo_vmware.api [None req-9238b777-b0d4-44d3-9d4b-954d997d6046 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194041, 'name': PowerOffVM_Task, 'duration_secs': 0.253075} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.134061] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-9238b777-b0d4-44d3-9d4b-954d997d6046 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: e8c87637-4a30-4482-af21-f58ad8a218fc] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 765.134561] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-9238b777-b0d4-44d3-9d4b-954d997d6046 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: e8c87637-4a30-4482-af21-f58ad8a218fc] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 765.135956] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0a9ab4c1-fb61-406b-ae31-53375eab3990 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.145009] env[62627]: DEBUG oslo_vmware.api [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2194042, 'name': ReconfigVM_Task, 'duration_secs': 0.225726} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.146411] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Updating instance 'ab999825-4310-4fd8-81f0-6bb5dde4175b' progress to 33 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 765.233535] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-9238b777-b0d4-44d3-9d4b-954d997d6046 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: e8c87637-4a30-4482-af21-f58ad8a218fc] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 765.233535] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-9238b777-b0d4-44d3-9d4b-954d997d6046 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: e8c87637-4a30-4482-af21-f58ad8a218fc] Deleting contents of the VM from datastore datastore1 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 765.234568] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-9238b777-b0d4-44d3-9d4b-954d997d6046 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Deleting the datastore file [datastore1] e8c87637-4a30-4482-af21-f58ad8a218fc {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 765.237602] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e45bf7bc-811e-4e68-8168-63d6455ff876 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.247074] env[62627]: DEBUG oslo_vmware.api [None req-9238b777-b0d4-44d3-9d4b-954d997d6046 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Waiting for the task: (returnval){ [ 765.247074] env[62627]: value = "task-2194045" [ 765.247074] env[62627]: _type = "Task" [ 765.247074] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.252951] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194043, 'name': CreateVM_Task} progress is 25%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.263526] env[62627]: DEBUG oslo_vmware.api [None req-9238b777-b0d4-44d3-9d4b-954d997d6046 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194045, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.322885] env[62627]: DEBUG nova.compute.manager [None req-ac68c546-e40c-4953-9e5d-64eade1881d6 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 33bf3932-3746-445b-b0fe-611466cdd9b1] Start destroying the instance on the hypervisor. 
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 765.323503] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ac68c546-e40c-4953-9e5d-64eade1881d6 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 33bf3932-3746-445b-b0fe-611466cdd9b1] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 765.324635] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f51179b9-e398-4c8f-868b-4631b64ff822 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.334631] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac68c546-e40c-4953-9e5d-64eade1881d6 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 33bf3932-3746-445b-b0fe-611466cdd9b1] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 765.334986] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a2248a54-9064-40ee-9e15-521aba756618 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.347080] env[62627]: DEBUG oslo_vmware.api [None req-ac68c546-e40c-4953-9e5d-64eade1881d6 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Waiting for the task: (returnval){ [ 765.347080] env[62627]: value = "task-2194046" [ 765.347080] env[62627]: _type = "Task" [ 765.347080] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.360668] env[62627]: DEBUG oslo_vmware.api [None req-ac68c546-e40c-4953-9e5d-64eade1881d6 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194046, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.396246] env[62627]: DEBUG oslo_concurrency.lockutils [None req-779b70f1-de1b-468b-9275-cb15bd514a90 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Lock "f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.550s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 765.436621] env[62627]: DEBUG oslo_vmware.api [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Task: {'id': task-2194040, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.727782} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.436903] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] a397bce6-30c7-485d-81ab-88a8e021996c/a397bce6-30c7-485d-81ab-88a8e021996c.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 765.437122] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 765.437385] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8217375c-57a1-4653-9d43-f682875c0f97 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.448020] env[62627]: DEBUG oslo_vmware.api [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Waiting for the task: (returnval){ [ 765.448020] env[62627]: value = "task-2194047" [ 765.448020] env[62627]: _type = "Task" [ 765.448020] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.461035] env[62627]: DEBUG oslo_vmware.api [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Task: {'id': task-2194047, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.490966] env[62627]: INFO nova.virt.block_device [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] Booting with volume 51233852-f0e4-4bab-903c-b0575335358d at /dev/sda [ 765.567084] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8f805d0d-1e0f-43b0-be56-33065ba4e3ad {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.584378] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d512279-8d14-4e0d-9d10-792c63b98ee4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.620088] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ab3fa1e9-a4f3-4b0c-a28c-6b57f0ecd79d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.632040] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dc4f4cb-77a0-458d-a218-94cf621ddb91 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.658249] env[62627]: DEBUG nova.virt.hardware [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 765.658343] env[62627]: DEBUG nova.virt.hardware [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 765.658973] env[62627]: DEBUG nova.virt.hardware [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 765.658973] env[62627]: DEBUG nova.virt.hardware [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 765.658973] env[62627]: DEBUG nova.virt.hardware [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 765.658973] env[62627]: DEBUG nova.virt.hardware [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 765.659184] env[62627]: DEBUG nova.virt.hardware [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 765.659338] env[62627]: DEBUG nova.virt.hardware [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 765.659484] env[62627]: DEBUG nova.virt.hardware [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 765.659835] env[62627]: DEBUG nova.virt.hardware [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 765.659835] env[62627]: DEBUG nova.virt.hardware [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 765.666044] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Reconfiguring VM instance instance-00000005 to detach disk 2000 {{(pid=62627) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 765.675606] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-01214413-e511-4e89-b176-47ea94e120c9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.693779] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7951061-bfd1-4ecd-8a5d-d6061ca508eb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.697977] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45da2f50-11b5-43f5-b088-bfd60f74c19a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.707401] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2f62356-166b-4bcf-98e7-1970ad5d3918 {{(pid=62627) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.712205] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11789688-e7cf-49e5-b9d3-481620379ef5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.714092] env[62627]: DEBUG oslo_vmware.api [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Waiting for the task: (returnval){ [ 765.714092] env[62627]: value = "task-2194048" [ 765.714092] env[62627]: _type = "Task" [ 765.714092] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.755390] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b96bb7a5-faef-477b-b7bd-c00c4df32ac5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.758450] env[62627]: DEBUG oslo_vmware.api [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2194048, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.759452] env[62627]: DEBUG nova.virt.block_device [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] Updating existing volume attachment record: 2428b311-93f7-4607-9204-bbaf2275a5d5 {{(pid=62627) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 765.776255] env[62627]: DEBUG oslo_vmware.api [None req-9238b777-b0d4-44d3-9d4b-954d997d6046 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194045, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.190542} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.776547] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194043, 'name': CreateVM_Task, 'duration_secs': 0.655827} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.780021] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd5bc51e-3554-480f-a601-86281e791254 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.782113] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-9238b777-b0d4-44d3-9d4b-954d997d6046 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 765.782324] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-9238b777-b0d4-44d3-9d4b-954d997d6046 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: e8c87637-4a30-4482-af21-f58ad8a218fc] Deleted contents of the VM from datastore datastore1 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 765.782618] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-9238b777-b0d4-44d3-9d4b-954d997d6046 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: e8c87637-4a30-4482-af21-f58ad8a218fc] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 765.782835] env[62627]: INFO nova.compute.manager [None req-9238b777-b0d4-44d3-9d4b-954d997d6046 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: e8c87637-4a30-4482-af21-f58ad8a218fc] Took 1.22 seconds to destroy the instance on the hypervisor. [ 765.783126] env[62627]: DEBUG oslo.service.loopingcall [None req-9238b777-b0d4-44d3-9d4b-954d997d6046 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 765.783270] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 765.783499] env[62627]: DEBUG nova.compute.manager [-] [instance: e8c87637-4a30-4482-af21-f58ad8a218fc] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 765.783588] env[62627]: DEBUG nova.network.neutron [-] [instance: e8c87637-4a30-4482-af21-f58ad8a218fc] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 765.785612] env[62627]: DEBUG oslo_concurrency.lockutils [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 765.786519] env[62627]: DEBUG oslo_concurrency.lockutils [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 765.786519] env[62627]: DEBUG oslo_concurrency.lockutils [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 765.789546] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-031d359f-041a-4a78-9b60-163dd57a26f3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.798361] env[62627]: DEBUG nova.compute.provider_tree [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 765.801607] env[62627]: DEBUG oslo_vmware.api [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Waiting for the task: (returnval){ [ 765.801607] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52c98830-bf4f-4973-cda7-1ea56d82e555" [ 765.801607] env[62627]: _type = "Task" [ 765.801607] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.818052] env[62627]: DEBUG oslo_vmware.api [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52c98830-bf4f-4973-cda7-1ea56d82e555, 'name': SearchDatastore_Task, 'duration_secs': 0.017859} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.818373] env[62627]: DEBUG oslo_concurrency.lockutils [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 765.818609] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 765.818841] env[62627]: DEBUG oslo_concurrency.lockutils [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 765.818986] env[62627]: DEBUG oslo_concurrency.lockutils [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 765.819200] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 765.819484] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5480219a-d7fa-4f79-be1d-2b3e5d85e99a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.838917] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 765.839975] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 765.840451] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e7c707a8-a052-4717-9c78-d8238f31519b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.849382] env[62627]: DEBUG oslo_vmware.api [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Waiting for the task: (returnval){ [ 765.849382] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52c62bca-38b9-e591-28e2-c5c286c476ee" [ 765.849382] env[62627]: _type = "Task" [ 765.849382] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.866256] env[62627]: DEBUG oslo_vmware.api [None req-ac68c546-e40c-4953-9e5d-64eade1881d6 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194046, 'name': PowerOffVM_Task, 'duration_secs': 0.210832} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.868380] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac68c546-e40c-4953-9e5d-64eade1881d6 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 33bf3932-3746-445b-b0fe-611466cdd9b1] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 765.868380] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ac68c546-e40c-4953-9e5d-64eade1881d6 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 33bf3932-3746-445b-b0fe-611466cdd9b1] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 765.868525] env[62627]: DEBUG oslo_vmware.api [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52c62bca-38b9-e591-28e2-c5c286c476ee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.868864] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fecf71b7-5d50-4b3c-a330-82db0ca22ec0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.901569] env[62627]: DEBUG nova.compute.manager [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] [instance: 16c4065b-83cf-4b1c-92bf-66ce0a75e573] Starting instance... 
{{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 765.955338] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ac68c546-e40c-4953-9e5d-64eade1881d6 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 33bf3932-3746-445b-b0fe-611466cdd9b1] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 765.957053] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ac68c546-e40c-4953-9e5d-64eade1881d6 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 33bf3932-3746-445b-b0fe-611466cdd9b1] Deleting contents of the VM from datastore datastore1 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 765.958336] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac68c546-e40c-4953-9e5d-64eade1881d6 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Deleting the datastore file [datastore1] 33bf3932-3746-445b-b0fe-611466cdd9b1 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 765.958336] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b3a09c29-106e-471c-b963-d70c44e0b731 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.967367] env[62627]: DEBUG oslo_vmware.api [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Task: {'id': task-2194047, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078162} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.968124] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 765.968977] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e80f5f3-cd2c-406a-8898-3d840d1eff35 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.974948] env[62627]: DEBUG oslo_vmware.api [None req-ac68c546-e40c-4953-9e5d-64eade1881d6 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Waiting for the task: (returnval){ [ 765.974948] env[62627]: value = "task-2194050" [ 765.974948] env[62627]: _type = "Task" [ 765.974948] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.998110] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Reconfiguring VM instance instance-00000010 to attach disk [datastore2] a397bce6-30c7-485d-81ab-88a8e021996c/a397bce6-30c7-485d-81ab-88a8e021996c.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 765.999151] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6f23e925-987a-4a80-b893-e24dc796f839 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.017529] env[62627]: DEBUG oslo_vmware.api [None req-ac68c546-e40c-4953-9e5d-64eade1881d6 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194050, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.024943] env[62627]: DEBUG oslo_vmware.api [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Waiting for the task: (returnval){ [ 766.024943] env[62627]: value = "task-2194051" [ 766.024943] env[62627]: _type = "Task" [ 766.024943] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.039531] env[62627]: DEBUG oslo_vmware.api [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Task: {'id': task-2194051, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.225098] env[62627]: DEBUG oslo_vmware.api [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2194048, 'name': ReconfigVM_Task, 'duration_secs': 0.201644} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.225316] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Reconfigured VM instance instance-00000005 to detach disk 2000 {{(pid=62627) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 766.226106] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32a39bdb-1397-4553-8746-c7334b55a211 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.249558] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Reconfiguring VM instance instance-00000005 to attach disk [datastore2] ab999825-4310-4fd8-81f0-6bb5dde4175b/ab999825-4310-4fd8-81f0-6bb5dde4175b.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 766.249855] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c2c1d034-347a-4a0c-95ab-6637f9f892bf {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.271206] env[62627]: DEBUG oslo_vmware.api [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Waiting for the task: (returnval){ [ 766.271206] env[62627]: value = "task-2194052" [ 766.271206] env[62627]: _type = "Task" [ 766.271206] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.280929] env[62627]: DEBUG oslo_vmware.api [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2194052, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.305062] env[62627]: DEBUG nova.scheduler.client.report [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 766.368292] env[62627]: DEBUG oslo_vmware.api [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52c62bca-38b9-e591-28e2-c5c286c476ee, 'name': SearchDatastore_Task, 'duration_secs': 0.018066} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.369638] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2dc3d2eb-7738-4c2e-91ea-f39bd8f539ff {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.376366] env[62627]: DEBUG oslo_vmware.api [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Waiting for the task: (returnval){ [ 766.376366] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]524653c9-1b29-a866-3659-5af133023a26" [ 766.376366] env[62627]: _type = "Task" [ 766.376366] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.385406] env[62627]: DEBUG oslo_vmware.api [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]524653c9-1b29-a866-3659-5af133023a26, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.393309] env[62627]: DEBUG nova.network.neutron [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] Successfully created port: 87592606-744f-4632-a44b-69f4b8ef9acf {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 766.431684] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 766.443904] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Acquiring lock "d28285ed-2ec0-43a4-bd34-82d33816fea4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 766.444174] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Lock "d28285ed-2ec0-43a4-bd34-82d33816fea4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 766.485746] env[62627]: DEBUG oslo_vmware.api [None req-ac68c546-e40c-4953-9e5d-64eade1881d6 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194050, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.535211] env[62627]: DEBUG oslo_vmware.api [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Task: {'id': task-2194051, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.598343] env[62627]: DEBUG nova.compute.manager [req-32cf9438-3058-4898-9d92-02842ec29f4a req-9b334efc-6c23-4f8c-ac14-cd85089e5b3c service nova] [instance: 7691c2e2-4e43-4990-bf4d-2ef3298a4264] Received event network-changed-abec21e1-204b-4ed0-801a-ab9f9bb8fcb9 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 766.598437] env[62627]: DEBUG nova.compute.manager [req-32cf9438-3058-4898-9d92-02842ec29f4a req-9b334efc-6c23-4f8c-ac14-cd85089e5b3c service nova] [instance: 7691c2e2-4e43-4990-bf4d-2ef3298a4264] Refreshing instance network info cache due to event network-changed-abec21e1-204b-4ed0-801a-ab9f9bb8fcb9. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 766.598606] env[62627]: DEBUG oslo_concurrency.lockutils [req-32cf9438-3058-4898-9d92-02842ec29f4a req-9b334efc-6c23-4f8c-ac14-cd85089e5b3c service nova] Acquiring lock "refresh_cache-7691c2e2-4e43-4990-bf4d-2ef3298a4264" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 766.598753] env[62627]: DEBUG oslo_concurrency.lockutils [req-32cf9438-3058-4898-9d92-02842ec29f4a req-9b334efc-6c23-4f8c-ac14-cd85089e5b3c service nova] Acquired lock "refresh_cache-7691c2e2-4e43-4990-bf4d-2ef3298a4264" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 766.598923] env[62627]: DEBUG nova.network.neutron [req-32cf9438-3058-4898-9d92-02842ec29f4a req-9b334efc-6c23-4f8c-ac14-cd85089e5b3c service nova] [instance: 7691c2e2-4e43-4990-bf4d-2ef3298a4264] Refreshing network info cache for port abec21e1-204b-4ed0-801a-ab9f9bb8fcb9 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 766.784575] env[62627]: DEBUG oslo_vmware.api [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2194052, 'name': ReconfigVM_Task, 'duration_secs': 0.306592} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.784922] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Reconfigured VM instance instance-00000005 to attach disk [datastore2] ab999825-4310-4fd8-81f0-6bb5dde4175b/ab999825-4310-4fd8-81f0-6bb5dde4175b.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 766.785214] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Updating instance 'ab999825-4310-4fd8-81f0-6bb5dde4175b' progress to 50 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 766.811949] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.836s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 766.811949] env[62627]: DEBUG nova.compute.manager [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 15799515-ab7d-4e7c-a6e0-c740baf4f93a] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 766.815554] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.891s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 766.816549] env[62627]: INFO nova.compute.claims [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: f09c685d-9103-4c94-a44a-266aacc334f3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 766.899250] env[62627]: DEBUG oslo_vmware.api [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]524653c9-1b29-a866-3659-5af133023a26, 'name': SearchDatastore_Task, 'duration_secs': 0.016433} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.899683] env[62627]: DEBUG oslo_concurrency.lockutils [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 766.900011] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 63e884c4-2cc1-431d-8a7f-1c4259b29e1a/63e884c4-2cc1-431d-8a7f-1c4259b29e1a.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 766.900286] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3fdf40f8-4e62-40c5-89c1-350410ee4208 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.909725] env[62627]: DEBUG oslo_vmware.api [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Waiting for the task: (returnval){ [ 766.909725] env[62627]: value = "task-2194053" [ 766.909725] env[62627]: _type = "Task" [ 766.909725] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.919136] env[62627]: DEBUG oslo_vmware.api [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Task: {'id': task-2194053, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.986766] env[62627]: DEBUG oslo_vmware.api [None req-ac68c546-e40c-4953-9e5d-64eade1881d6 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194050, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.521084} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.987098] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac68c546-e40c-4953-9e5d-64eade1881d6 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 766.987334] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ac68c546-e40c-4953-9e5d-64eade1881d6 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 33bf3932-3746-445b-b0fe-611466cdd9b1] Deleted contents of the VM from datastore datastore1 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 766.987538] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ac68c546-e40c-4953-9e5d-64eade1881d6 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 33bf3932-3746-445b-b0fe-611466cdd9b1] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 766.987747] env[62627]: INFO nova.compute.manager [None req-ac68c546-e40c-4953-9e5d-64eade1881d6 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 33bf3932-3746-445b-b0fe-611466cdd9b1] Took 1.66 seconds to destroy the instance on the hypervisor. [ 766.988023] env[62627]: DEBUG oslo.service.loopingcall [None req-ac68c546-e40c-4953-9e5d-64eade1881d6 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 766.988231] env[62627]: DEBUG nova.compute.manager [-] [instance: 33bf3932-3746-445b-b0fe-611466cdd9b1] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 766.988349] env[62627]: DEBUG nova.network.neutron [-] [instance: 33bf3932-3746-445b-b0fe-611466cdd9b1] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 767.039471] env[62627]: DEBUG oslo_vmware.api [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Task: {'id': task-2194051, 'name': ReconfigVM_Task, 'duration_secs': 0.774052} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.039471] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Reconfigured VM instance instance-00000010 to attach disk [datastore2] a397bce6-30c7-485d-81ab-88a8e021996c/a397bce6-30c7-485d-81ab-88a8e021996c.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 767.039910] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c466bfde-e2aa-474a-ba01-e78eb8dc39ad {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.049588] env[62627]: DEBUG oslo_vmware.api [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Waiting for the task: (returnval){ [ 767.049588] env[62627]: value = "task-2194054" [ 767.049588] env[62627]: _type = "Task" [ 767.049588] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.061236] env[62627]: DEBUG oslo_vmware.api [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Task: {'id': task-2194054, 'name': Rename_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.293048] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c904b873-e370-4094-bb31-fcd9f07daea5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.317456] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f625375-7376-4166-b7b5-2a8edcc144c0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.322515] env[62627]: DEBUG nova.compute.utils [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 767.327026] env[62627]: DEBUG nova.compute.manager [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 15799515-ab7d-4e7c-a6e0-c740baf4f93a] Allocating IP information in the background. 
{{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 767.327026] env[62627]: DEBUG nova.network.neutron [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 15799515-ab7d-4e7c-a6e0-c740baf4f93a] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 767.348356] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Updating instance 'ab999825-4310-4fd8-81f0-6bb5dde4175b' progress to 67 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 767.420624] env[62627]: DEBUG oslo_vmware.api [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Task: {'id': task-2194053, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.564545] env[62627]: DEBUG oslo_vmware.api [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Task: {'id': task-2194054, 'name': Rename_Task, 'duration_secs': 0.249292} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.564831] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 767.565087] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-af642791-1ac0-460f-af06-e61cd911667f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.572486] env[62627]: DEBUG nova.policy [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'eaa47bdec0e04f67a5bba36271ffeb83', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '65a2f5e400f049089ba7a7d8a4a77e34', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 767.575289] env[62627]: DEBUG oslo_vmware.api [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Waiting for the task: (returnval){ [ 767.575289] env[62627]: value = "task-2194055" [ 767.575289] env[62627]: _type = "Task" [ 767.575289] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.586132] env[62627]: DEBUG nova.compute.manager [req-4cbc94dc-ce97-4881-a57f-3df2cbfa5308 req-963a1480-9ed5-4b56-92ca-bc19772b3901 service nova] [instance: e8c87637-4a30-4482-af21-f58ad8a218fc] Received event network-vif-deleted-a68f6bb7-6d15-4329-aedf-cc72bcc3e57a {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 767.586132] env[62627]: INFO nova.compute.manager [req-4cbc94dc-ce97-4881-a57f-3df2cbfa5308 req-963a1480-9ed5-4b56-92ca-bc19772b3901 service nova] [instance: e8c87637-4a30-4482-af21-f58ad8a218fc] Neutron deleted interface a68f6bb7-6d15-4329-aedf-cc72bcc3e57a; detaching it from the instance and deleting it from the info cache [ 767.586132] env[62627]: DEBUG nova.network.neutron [req-4cbc94dc-ce97-4881-a57f-3df2cbfa5308 req-963a1480-9ed5-4b56-92ca-bc19772b3901 service nova] [instance: e8c87637-4a30-4482-af21-f58ad8a218fc] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 767.592176] env[62627]: DEBUG oslo_vmware.api [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Task: {'id': task-2194055, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.689053] env[62627]: DEBUG nova.network.neutron [-] [instance: e8c87637-4a30-4482-af21-f58ad8a218fc] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 767.828302] env[62627]: DEBUG nova.compute.manager [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 15799515-ab7d-4e7c-a6e0-c740baf4f93a] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 767.922693] env[62627]: DEBUG oslo_vmware.api [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Task: {'id': task-2194053, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.939268] env[62627]: DEBUG nova.compute.manager [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] Start spawning the instance on the hypervisor. 
{{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 767.939808] env[62627]: DEBUG nova.virt.hardware [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 767.940070] env[62627]: DEBUG nova.virt.hardware [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 767.940192] env[62627]: DEBUG nova.virt.hardware [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 767.940372] env[62627]: DEBUG nova.virt.hardware [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 767.940516] env[62627]: DEBUG nova.virt.hardware [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 767.940658] env[62627]: DEBUG nova.virt.hardware [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 767.940939] env[62627]: DEBUG nova.virt.hardware [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 767.941249] env[62627]: DEBUG nova.virt.hardware [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 767.941531] env[62627]: DEBUG nova.virt.hardware [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Got 1 possible 
topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 767.941925] env[62627]: DEBUG nova.virt.hardware [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 767.942172] env[62627]: DEBUG nova.virt.hardware [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 767.946051] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15144f13-6394-468a-a042-4d3ae9b92466 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.956078] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-906ddc59-364a-4c4b-8233-cb83c88c798a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.979211] env[62627]: DEBUG nova.network.neutron [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Port a7d230b4-8ae5-4c90-8ccf-806a4056e883 binding to destination host cpu-1 is already ACTIVE {{(pid=62627) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 768.089598] env[62627]: DEBUG oslo_vmware.api [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Task: {'id': task-2194055, 'name': PowerOnVM_Task} progress is 64%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.089829] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-38d0bda0-b369-4f37-a30f-b6b07c7a4ea5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.099676] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4df8eb8e-af6e-4033-8281-f540467ede6f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.136103] env[62627]: DEBUG nova.compute.manager [req-4cbc94dc-ce97-4881-a57f-3df2cbfa5308 req-963a1480-9ed5-4b56-92ca-bc19772b3901 service nova] [instance: e8c87637-4a30-4482-af21-f58ad8a218fc] Detach interface failed, port_id=a68f6bb7-6d15-4329-aedf-cc72bcc3e57a, reason: Instance e8c87637-4a30-4482-af21-f58ad8a218fc could not be found. {{(pid=62627) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 768.186831] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98aefc44-c962-4523-b08f-5143e38b6e71 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.192602] env[62627]: INFO nova.compute.manager [-] [instance: e8c87637-4a30-4482-af21-f58ad8a218fc] Took 2.41 seconds to deallocate network for instance. 
[ 768.201322] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-0a650544-7aff-4db9-94a7-31ee0c84c193 tempest-ServersAdminNegativeTestJSON-1381974535 tempest-ServersAdminNegativeTestJSON-1381974535-project-admin] [instance: f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7] Suspending the VM {{(pid=62627) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 768.204839] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-e0fee142-f5e6-4191-8411-87cae66a74c1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.211504] env[62627]: DEBUG oslo_vmware.api [None req-0a650544-7aff-4db9-94a7-31ee0c84c193 tempest-ServersAdminNegativeTestJSON-1381974535 tempest-ServersAdminNegativeTestJSON-1381974535-project-admin] Waiting for the task: (returnval){ [ 768.211504] env[62627]: value = "task-2194056" [ 768.211504] env[62627]: _type = "Task" [ 768.211504] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.223406] env[62627]: DEBUG oslo_vmware.api [None req-0a650544-7aff-4db9-94a7-31ee0c84c193 tempest-ServersAdminNegativeTestJSON-1381974535 tempest-ServersAdminNegativeTestJSON-1381974535-project-admin] Task: {'id': task-2194056, 'name': SuspendVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.334257] env[62627]: DEBUG nova.network.neutron [req-32cf9438-3058-4898-9d92-02842ec29f4a req-9b334efc-6c23-4f8c-ac14-cd85089e5b3c service nova] [instance: 7691c2e2-4e43-4990-bf4d-2ef3298a4264] Updated VIF entry in instance network info cache for port abec21e1-204b-4ed0-801a-ab9f9bb8fcb9. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 768.334257] env[62627]: DEBUG nova.network.neutron [req-32cf9438-3058-4898-9d92-02842ec29f4a req-9b334efc-6c23-4f8c-ac14-cd85089e5b3c service nova] [instance: 7691c2e2-4e43-4990-bf4d-2ef3298a4264] Updating instance_info_cache with network_info: [{"id": "abec21e1-204b-4ed0-801a-ab9f9bb8fcb9", "address": "fa:16:3e:88:23:61", "network": {"id": "227e798c-693c-4b8a-ad80-f419e26c7366", "bridge": "br-int", "label": "tempest-ServersTestJSON-1035789165-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.236", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9bd77b83b3af479d9713f405098112ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1cbd5e0e-9116-46f1-9748-13a73d2d7e75", "external-id": "nsx-vlan-transportzone-690", "segmentation_id": 690, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapabec21e1-20", "ovs_interfaceid": "abec21e1-204b-4ed0-801a-ab9f9bb8fcb9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 768.422186] env[62627]: DEBUG oslo_vmware.api [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 
tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Task: {'id': task-2194053, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.537180] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6957ba8a-439b-4615-bb78-d21eb8a885dd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.549603] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2b60891-e120-4b1c-9881-e609bdd89f39 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.597485] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e8b4274-07d4-49bd-b438-0e2497d4d97f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.611867] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81569663-0687-4ed7-915f-2a8439a94a3b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.616914] env[62627]: DEBUG oslo_vmware.api [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Task: {'id': task-2194055, 'name': PowerOnVM_Task} progress is 64%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.633739] env[62627]: DEBUG nova.compute.provider_tree [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 768.688259] env[62627]: DEBUG nova.network.neutron [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 15799515-ab7d-4e7c-a6e0-c740baf4f93a] Successfully created port: 0f9d4335-0759-496e-8e45-fec3775be050 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 768.704716] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9238b777-b0d4-44d3-9d4b-954d997d6046 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 768.727567] env[62627]: DEBUG oslo_vmware.api [None req-0a650544-7aff-4db9-94a7-31ee0c84c193 tempest-ServersAdminNegativeTestJSON-1381974535 tempest-ServersAdminNegativeTestJSON-1381974535-project-admin] Task: {'id': task-2194056, 'name': SuspendVM_Task} progress is 62%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.745975] env[62627]: DEBUG nova.network.neutron [-] [instance: 33bf3932-3746-445b-b0fe-611466cdd9b1] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 768.840940] env[62627]: DEBUG oslo_concurrency.lockutils [req-32cf9438-3058-4898-9d92-02842ec29f4a req-9b334efc-6c23-4f8c-ac14-cd85089e5b3c service nova] Releasing lock "refresh_cache-7691c2e2-4e43-4990-bf4d-2ef3298a4264" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 768.845228] env[62627]: DEBUG nova.compute.manager [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 15799515-ab7d-4e7c-a6e0-c740baf4f93a] Start spawning the instance on the hypervisor. {{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 768.891796] env[62627]: DEBUG nova.virt.hardware [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 768.891796] env[62627]: DEBUG nova.virt.hardware [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 768.891796] env[62627]: DEBUG nova.virt.hardware [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 768.892127] env[62627]: DEBUG nova.virt.hardware [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 768.892127] env[62627]: DEBUG nova.virt.hardware [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 768.892127] env[62627]: DEBUG nova.virt.hardware [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 
tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 768.892127] env[62627]: DEBUG nova.virt.hardware [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 768.892127] env[62627]: DEBUG nova.virt.hardware [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 768.892386] env[62627]: DEBUG nova.virt.hardware [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 768.892386] env[62627]: DEBUG nova.virt.hardware [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 768.892386] env[62627]: DEBUG nova.virt.hardware [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 768.892386] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dab8e292-ca83-49ba-8581-5d9937ed983c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.901310] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0dca891-80f2-4d9a-9acf-611fbe8a127a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.927147] env[62627]: DEBUG oslo_vmware.api [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Task: {'id': task-2194053, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.659285} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.927719] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 63e884c4-2cc1-431d-8a7f-1c4259b29e1a/63e884c4-2cc1-431d-8a7f-1c4259b29e1a.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 768.927719] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 768.928193] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c77bbb7d-dcbb-4c85-8318-5a1f41f6278c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.940291] env[62627]: DEBUG oslo_vmware.api [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Waiting for the task: (returnval){ [ 768.940291] env[62627]: value = "task-2194057" [ 768.940291] env[62627]: _type = "Task" [ 768.940291] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.950301] env[62627]: DEBUG oslo_vmware.api [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Task: {'id': task-2194057, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.012823] env[62627]: DEBUG oslo_concurrency.lockutils [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Acquiring lock "ab999825-4310-4fd8-81f0-6bb5dde4175b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 769.013103] env[62627]: DEBUG oslo_concurrency.lockutils [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Lock "ab999825-4310-4fd8-81f0-6bb5dde4175b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 769.013275] env[62627]: DEBUG oslo_concurrency.lockutils [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Lock "ab999825-4310-4fd8-81f0-6bb5dde4175b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 769.108448] env[62627]: DEBUG oslo_vmware.api [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Task: {'id': task-2194055, 'name': PowerOnVM_Task} progress is 64%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.135865] env[62627]: DEBUG nova.scheduler.client.report [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 769.226484] env[62627]: DEBUG oslo_vmware.api [None req-0a650544-7aff-4db9-94a7-31ee0c84c193 tempest-ServersAdminNegativeTestJSON-1381974535 tempest-ServersAdminNegativeTestJSON-1381974535-project-admin] Task: {'id': task-2194056, 'name': SuspendVM_Task, 'duration_secs': 0.73643} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.226741] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-0a650544-7aff-4db9-94a7-31ee0c84c193 tempest-ServersAdminNegativeTestJSON-1381974535 tempest-ServersAdminNegativeTestJSON-1381974535-project-admin] [instance: f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7] Suspended the VM {{(pid=62627) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 769.226923] env[62627]: DEBUG nova.compute.manager [None req-0a650544-7aff-4db9-94a7-31ee0c84c193 tempest-ServersAdminNegativeTestJSON-1381974535 tempest-ServersAdminNegativeTestJSON-1381974535-project-admin] [instance: f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 769.229840] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d504e9e7-4b21-4d60-815a-cd2bef16dc5a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.252101] env[62627]: INFO nova.compute.manager [-] [instance: 33bf3932-3746-445b-b0fe-611466cdd9b1] Took 2.26 seconds to deallocate network for instance. [ 769.455594] env[62627]: DEBUG oslo_vmware.api [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Task: {'id': task-2194057, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075029} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.455890] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 769.456784] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-856f3755-c51c-40c7-8bfb-a9c211ab3aef {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.484258] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Reconfiguring VM instance instance-0000000d to attach disk [datastore2] 63e884c4-2cc1-431d-8a7f-1c4259b29e1a/63e884c4-2cc1-431d-8a7f-1c4259b29e1a.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 769.484258] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ffa26a74-a286-40af-bcc3-2a1d76695422 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.520197] env[62627]: DEBUG oslo_vmware.api [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Waiting for the task: (returnval){ [ 769.520197] env[62627]: value = "task-2194058" [ 769.520197] env[62627]: _type = "Task" [ 769.520197] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.531654] env[62627]: DEBUG oslo_vmware.api [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Task: {'id': task-2194058, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.620016] env[62627]: DEBUG oslo_vmware.api [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Task: {'id': task-2194055, 'name': PowerOnVM_Task} progress is 68%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.641252] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.826s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 769.641798] env[62627]: DEBUG nova.compute.manager [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: f09c685d-9103-4c94-a44a-266aacc334f3] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 769.645122] env[62627]: DEBUG oslo_concurrency.lockutils [None req-10862f9b-0f1c-4b93-8fab-d832620bc520 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.929s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 769.645491] env[62627]: DEBUG nova.objects.instance [None req-10862f9b-0f1c-4b93-8fab-d832620bc520 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Lazy-loading 'resources' on Instance uuid 1298559d-14fb-422c-8206-a35a747f0c06 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 769.767108] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ac68c546-e40c-4953-9e5d-64eade1881d6 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 769.874556] env[62627]: DEBUG nova.network.neutron [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] Successfully updated port: 87592606-744f-4632-a44b-69f4b8ef9acf {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 770.038562] env[62627]: DEBUG oslo_vmware.api [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Task: {'id': task-2194058, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.060883] env[62627]: DEBUG nova.compute.manager [req-b01c6400-789c-40c0-8cd6-dffd24d0c325 req-5963c1e3-b9e7-4dac-9517-f51138f009d0 service nova] [instance: 33bf3932-3746-445b-b0fe-611466cdd9b1] Received event network-vif-deleted-ef05a825-d4fa-406d-b515-9d2a22422ddc {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 770.093883] env[62627]: DEBUG oslo_concurrency.lockutils [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Acquiring lock "refresh_cache-ab999825-4310-4fd8-81f0-6bb5dde4175b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 770.094070] env[62627]: DEBUG oslo_concurrency.lockutils [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Acquired lock "refresh_cache-ab999825-4310-4fd8-81f0-6bb5dde4175b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 770.094239] env[62627]: DEBUG nova.network.neutron [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 770.114075] env[62627]: DEBUG oslo_vmware.api [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Task: {'id': task-2194055, 'name': PowerOnVM_Task, 'duration_secs': 2.120071} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.114347] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 770.114540] env[62627]: INFO nova.compute.manager [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Took 11.88 seconds to spawn the instance on the hypervisor. 
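The PowerOnVM_Task entries above, with progress polling at oslo_vmware/api.py:434 and the final completion (including duration_secs) at api.py:444, come from oslo.vmware's task-wait loop. A minimal sketch of driving such a vCenter task through oslo.vmware's public session API is shown below; the host, credentials, and the already-resolved vm_ref are placeholders, and this is a simplified stand-in rather than the code path Nova's vmwareapi driver itself uses:

```python
from oslo_vmware import api

# Placeholder connection details; a real deployment reads these from the
# [vmware] section of nova.conf.
session = api.VMwareAPISession(
    'vc1.example.test',
    'administrator@vsphere.local',
    'secret',
    api_retry_count=10,
    task_poll_interval=0.5)  # roughly the ~0.5 s polling cadence visible in the log

def power_on(vm_ref):
    """Start a VM and block until the vCenter task finishes."""
    # Returns a Task managed object reference immediately.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # Polls the task, emitting the "Task: {...} progress is N%" DEBUG lines,
    # and raises if the task ends in an error state.
    session.wait_for_task(task)
```

The same invoke_api/wait_for_task pattern underlies the other tasks in this trace (SuspendVM_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, CreateVM_Task).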
[ 770.114715] env[62627]: DEBUG nova.compute.manager [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 770.115524] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad6d3a39-334c-4b8b-b46d-b48c77fde743 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.150512] env[62627]: DEBUG nova.compute.utils [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 770.156703] env[62627]: DEBUG nova.compute.manager [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: f09c685d-9103-4c94-a44a-266aacc334f3] Allocating IP information in the background. {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 770.156765] env[62627]: DEBUG nova.network.neutron [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: f09c685d-9103-4c94-a44a-266aacc334f3] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 770.224032] env[62627]: DEBUG nova.policy [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'eaa47bdec0e04f67a5bba36271ffeb83', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '65a2f5e400f049089ba7a7d8a4a77e34', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 770.379026] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Acquiring lock "refresh_cache-e5712962-ad8e-4cf2-aa47-275293609a13" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 770.379026] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Acquired lock "refresh_cache-e5712962-ad8e-4cf2-aa47-275293609a13" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 770.379026] env[62627]: DEBUG nova.network.neutron [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] Building network info cache for instance {{(pid=62627) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2067}} [ 770.542178] env[62627]: DEBUG oslo_vmware.api [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Task: {'id': task-2194058, 'name': ReconfigVM_Task, 'duration_secs': 0.800477} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.546495] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Reconfigured VM instance instance-0000000d to attach disk [datastore2] 63e884c4-2cc1-431d-8a7f-1c4259b29e1a/63e884c4-2cc1-431d-8a7f-1c4259b29e1a.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 770.547353] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cd62e4cf-7201-406a-92d0-0a1e7194f446 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.556621] env[62627]: DEBUG oslo_vmware.api [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Waiting for the task: (returnval){ [ 770.556621] env[62627]: value = "task-2194059" [ 770.556621] env[62627]: _type = "Task" [ 770.556621] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.572942] env[62627]: DEBUG oslo_vmware.api [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Task: {'id': task-2194059, 'name': Rename_Task} progress is 6%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.643131] env[62627]: INFO nova.compute.manager [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Took 33.83 seconds to build instance. [ 770.657126] env[62627]: DEBUG nova.compute.manager [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: f09c685d-9103-4c94-a44a-266aacc334f3] Start building block device mappings for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 770.832275] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92178231-05cc-4b16-b9a5-c2648af8474b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.852788] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-585d1a72-3da2-44d0-b87c-f519a8308c0b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.916956] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10bb4618-3473-43da-bcc7-bf82e1bfdde0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.926938] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f33fd0e-cfdd-4cbf-a3c9-4e6abcb25fc2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.949077] env[62627]: DEBUG nova.compute.provider_tree [None req-10862f9b-0f1c-4b93-8fab-d832620bc520 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 771.004163] env[62627]: DEBUG nova.network.neutron [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 771.072714] env[62627]: DEBUG oslo_vmware.api [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Task: {'id': task-2194059, 'name': Rename_Task, 'duration_secs': 0.2947} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.073223] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 771.073543] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3fe32f4b-ad7c-4f24-883f-c5d49500341e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.086970] env[62627]: DEBUG oslo_vmware.api [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Waiting for the task: (returnval){ [ 771.086970] env[62627]: value = "task-2194060" [ 771.086970] env[62627]: _type = "Task" [ 771.086970] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.097918] env[62627]: DEBUG oslo_vmware.api [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Task: {'id': task-2194060, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.150178] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4cd83d4a-1049-4807-86b1-6d44625ca166 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Lock "a397bce6-30c7-485d-81ab-88a8e021996c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.343s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 771.167265] env[62627]: DEBUG oslo_concurrency.lockutils [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Acquiring lock "5f0240da-bec1-4bf3-a975-6ed3d0630a2d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 771.167468] env[62627]: DEBUG oslo_concurrency.lockutils [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Lock "5f0240da-bec1-4bf3-a975-6ed3d0630a2d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 771.268215] env[62627]: DEBUG nova.network.neutron [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: f09c685d-9103-4c94-a44a-266aacc334f3] Successfully created port: 6d227bba-fdfe-4963-87cf-9c8bc5e7d9ee {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 771.456396] env[62627]: DEBUG nova.scheduler.client.report [None req-10862f9b-0f1c-4b93-8fab-d832620bc520 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 771.528127] env[62627]: DEBUG nova.network.neutron [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 15799515-ab7d-4e7c-a6e0-c740baf4f93a] Successfully updated port: 0f9d4335-0759-496e-8e45-fec3775be050 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 771.531876] env[62627]: DEBUG nova.network.neutron [None 
req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Updating instance_info_cache with network_info: [{"id": "a7d230b4-8ae5-4c90-8ccf-806a4056e883", "address": "fa:16:3e:4c:84:35", "network": {"id": "bd876f46-2bdf-4196-b718-c36f7e15f150", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.153", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ccb0cafa1a904d578ec4998b386bebf7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae1d6a8-cbba-4bbf-af10-ba5467340475", "external-id": "nsx-vlan-transportzone-271", "segmentation_id": 271, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7d230b4-8a", "ovs_interfaceid": "a7d230b4-8ae5-4c90-8ccf-806a4056e883", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 771.540778] env[62627]: DEBUG nova.network.neutron [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] Updating instance_info_cache with network_info: [{"id": "87592606-744f-4632-a44b-69f4b8ef9acf", "address": "fa:16:3e:79:cb:ab", "network": {"id": "b9125493-d85a-4d39-9114-1d2259e90e38", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1324862174-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f831870fb0824e8e8353640736a11692", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "411f389f-4e4f-4450-891e-38944cac6135", "external-id": "nsx-vlan-transportzone-795", "segmentation_id": 795, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87592606-74", "ovs_interfaceid": "87592606-744f-4632-a44b-69f4b8ef9acf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 771.602098] env[62627]: DEBUG oslo_vmware.api [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Task: {'id': task-2194060, 'name': PowerOnVM_Task} progress is 64%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.653604] env[62627]: DEBUG nova.compute.manager [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 771.671552] env[62627]: DEBUG nova.compute.manager [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: f09c685d-9103-4c94-a44a-266aacc334f3] Start spawning the instance on the hypervisor. {{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 771.696355] env[62627]: DEBUG nova.virt.hardware [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 771.696614] env[62627]: DEBUG nova.virt.hardware [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 771.696816] env[62627]: DEBUG nova.virt.hardware [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 771.697210] env[62627]: DEBUG nova.virt.hardware [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 771.697650] env[62627]: DEBUG nova.virt.hardware [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 771.697807] env[62627]: DEBUG nova.virt.hardware [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:434}} [ 771.698205] env[62627]: DEBUG nova.virt.hardware [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 771.698506] env[62627]: DEBUG nova.virt.hardware [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 771.698695] env[62627]: DEBUG nova.virt.hardware [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 771.698923] env[62627]: DEBUG nova.virt.hardware [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 771.699164] env[62627]: DEBUG nova.virt.hardware [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 771.700154] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f1283c3-5d4e-4404-b0a7-8c47cac983f7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.710010] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b618dbcc-7671-4694-8b65-fecd8cf5b64f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.962102] env[62627]: DEBUG oslo_concurrency.lockutils [None req-10862f9b-0f1c-4b93-8fab-d832620bc520 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.317s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 771.964843] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.146s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 771.970568] env[62627]: INFO nova.compute.claims [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 76fb58dc-df20-464b-bda0-dcf09d69c718] Claim successful on node 
domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 771.996698] env[62627]: INFO nova.scheduler.client.report [None req-10862f9b-0f1c-4b93-8fab-d832620bc520 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Deleted allocations for instance 1298559d-14fb-422c-8206-a35a747f0c06 [ 772.034861] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Acquiring lock "refresh_cache-15799515-ab7d-4e7c-a6e0-c740baf4f93a" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 772.035382] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Acquired lock "refresh_cache-15799515-ab7d-4e7c-a6e0-c740baf4f93a" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 772.035382] env[62627]: DEBUG nova.network.neutron [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 15799515-ab7d-4e7c-a6e0-c740baf4f93a] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 772.036609] env[62627]: DEBUG oslo_concurrency.lockutils [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Releasing lock "refresh_cache-ab999825-4310-4fd8-81f0-6bb5dde4175b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 772.042641] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Releasing lock "refresh_cache-e5712962-ad8e-4cf2-aa47-275293609a13" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 772.042771] env[62627]: DEBUG nova.compute.manager [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] Instance network_info: |[{"id": "87592606-744f-4632-a44b-69f4b8ef9acf", "address": "fa:16:3e:79:cb:ab", "network": {"id": "b9125493-d85a-4d39-9114-1d2259e90e38", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1324862174-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f831870fb0824e8e8353640736a11692", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "411f389f-4e4f-4450-891e-38944cac6135", "external-id": "nsx-vlan-transportzone-795", "segmentation_id": 795, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87592606-74", "ovs_interfaceid": "87592606-744f-4632-a44b-69f4b8ef9acf", 
"qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 772.043210] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:79:cb:ab', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '411f389f-4e4f-4450-891e-38944cac6135', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '87592606-744f-4632-a44b-69f4b8ef9acf', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 772.053856] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Creating folder: Project (f831870fb0824e8e8353640736a11692). Parent ref: group-v447541. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 772.058107] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-017be9f2-0a8c-4beb-bedc-e84e22361128 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.077109] env[62627]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 772.077109] env[62627]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=62627) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 772.077109] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Folder already exists: Project (f831870fb0824e8e8353640736a11692). Parent ref: group-v447541. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 772.077109] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Creating folder: Instances. Parent ref: group-v447542. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 772.077109] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cfc46342-c33d-4574-b40a-cbff89c5da71 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.089321] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Created folder: Instances in parent group-v447542. [ 772.090799] env[62627]: DEBUG oslo.service.loopingcall [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 772.099539] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 772.099822] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-edfae259-83e1-44c9-8012-d6e9a92cfede {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.125316] env[62627]: DEBUG oslo_vmware.api [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Task: {'id': task-2194060, 'name': PowerOnVM_Task, 'duration_secs': 0.881169} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.125520] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 772.125520] env[62627]: value = "task-2194063" [ 772.125520] env[62627]: _type = "Task" [ 772.125520] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.126418] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 772.126567] env[62627]: DEBUG nova.compute.manager [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 772.127460] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6bdc3ec-28c8-4366-ae5f-f131a1ec1364 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.139902] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194063, 'name': CreateVM_Task} progress is 5%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.181568] env[62627]: DEBUG oslo_concurrency.lockutils [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 772.257772] env[62627]: DEBUG oslo_concurrency.lockutils [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Acquiring lock "1129349d-65db-4e1d-bb08-cea793adf996" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 772.257994] env[62627]: DEBUG oslo_concurrency.lockutils [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Lock "1129349d-65db-4e1d-bb08-cea793adf996" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 772.506803] env[62627]: DEBUG oslo_concurrency.lockutils [None req-10862f9b-0f1c-4b93-8fab-d832620bc520 tempest-ServerDiagnosticsV248Test-1341637702 tempest-ServerDiagnosticsV248Test-1341637702-project-member] Lock "1298559d-14fb-422c-8206-a35a747f0c06" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.322s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 772.582033] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Acquiring lock "4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 772.582033] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Lock "4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 772.583132] env[62627]: DEBUG nova.network.neutron [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 15799515-ab7d-4e7c-a6e0-c740baf4f93a] Instance cache missing network info. 
{{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 772.606547] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c982121d-e46b-478a-9efe-6178b9b7fd57 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.650748] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cade02ff-1fd9-4171-917b-e9a765d01f52 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.658230] env[62627]: DEBUG nova.compute.manager [req-71bd539a-038c-477a-86e3-5ca0139fad67 req-30a7ca3a-4aec-4e88-bb60-fbe1fd8e20a8 service nova] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] Received event network-vif-plugged-87592606-744f-4632-a44b-69f4b8ef9acf {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 772.659274] env[62627]: DEBUG oslo_concurrency.lockutils [req-71bd539a-038c-477a-86e3-5ca0139fad67 req-30a7ca3a-4aec-4e88-bb60-fbe1fd8e20a8 service nova] Acquiring lock "e5712962-ad8e-4cf2-aa47-275293609a13-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 772.659870] env[62627]: DEBUG oslo_concurrency.lockutils [req-71bd539a-038c-477a-86e3-5ca0139fad67 req-30a7ca3a-4aec-4e88-bb60-fbe1fd8e20a8 service nova] Lock "e5712962-ad8e-4cf2-aa47-275293609a13-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 772.659870] env[62627]: DEBUG oslo_concurrency.lockutils [req-71bd539a-038c-477a-86e3-5ca0139fad67 req-30a7ca3a-4aec-4e88-bb60-fbe1fd8e20a8 service nova] Lock "e5712962-ad8e-4cf2-aa47-275293609a13-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 772.659870] env[62627]: DEBUG nova.compute.manager [req-71bd539a-038c-477a-86e3-5ca0139fad67 req-30a7ca3a-4aec-4e88-bb60-fbe1fd8e20a8 service nova] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] No waiting events found dispatching network-vif-plugged-87592606-744f-4632-a44b-69f4b8ef9acf {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 772.660144] env[62627]: WARNING nova.compute.manager [req-71bd539a-038c-477a-86e3-5ca0139fad67 req-30a7ca3a-4aec-4e88-bb60-fbe1fd8e20a8 service nova] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] Received unexpected event network-vif-plugged-87592606-744f-4632-a44b-69f4b8ef9acf for instance with vm_state building and task_state spawning. 
[ 772.660144] env[62627]: DEBUG nova.compute.manager [req-71bd539a-038c-477a-86e3-5ca0139fad67 req-30a7ca3a-4aec-4e88-bb60-fbe1fd8e20a8 service nova] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] Received event network-changed-87592606-744f-4632-a44b-69f4b8ef9acf {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 772.660296] env[62627]: DEBUG nova.compute.manager [req-71bd539a-038c-477a-86e3-5ca0139fad67 req-30a7ca3a-4aec-4e88-bb60-fbe1fd8e20a8 service nova] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] Refreshing instance network info cache due to event network-changed-87592606-744f-4632-a44b-69f4b8ef9acf. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 772.660473] env[62627]: DEBUG oslo_concurrency.lockutils [req-71bd539a-038c-477a-86e3-5ca0139fad67 req-30a7ca3a-4aec-4e88-bb60-fbe1fd8e20a8 service nova] Acquiring lock "refresh_cache-e5712962-ad8e-4cf2-aa47-275293609a13" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 772.660646] env[62627]: DEBUG oslo_concurrency.lockutils [req-71bd539a-038c-477a-86e3-5ca0139fad67 req-30a7ca3a-4aec-4e88-bb60-fbe1fd8e20a8 service nova] Acquired lock "refresh_cache-e5712962-ad8e-4cf2-aa47-275293609a13" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 772.660916] env[62627]: DEBUG nova.network.neutron [req-71bd539a-038c-477a-86e3-5ca0139fad67 req-30a7ca3a-4aec-4e88-bb60-fbe1fd8e20a8 service nova] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] Refreshing network info cache for port 87592606-744f-4632-a44b-69f4b8ef9acf {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 773.285364] env[62627]: DEBUG nova.network.neutron [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: f09c685d-9103-4c94-a44a-266aacc334f3] Successfully updated port: 6d227bba-fdfe-4963-87cf-9c8bc5e7d9ee {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 773.289447] env[62627]: DEBUG nova.compute.manager [req-a67f19ab-6d45-40d1-8243-19ad9e1eb02c req-f4e3ab90-f8c7-4759-8639-6c7de06ef1da service nova] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Received event network-changed-e56ec5b3-09ba-4f3a-9238-ed7d6c1f7991 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 773.289627] env[62627]: DEBUG nova.compute.manager [req-a67f19ab-6d45-40d1-8243-19ad9e1eb02c req-f4e3ab90-f8c7-4759-8639-6c7de06ef1da service nova] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Refreshing instance network info cache due to event network-changed-e56ec5b3-09ba-4f3a-9238-ed7d6c1f7991. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 773.289839] env[62627]: DEBUG oslo_concurrency.lockutils [req-a67f19ab-6d45-40d1-8243-19ad9e1eb02c req-f4e3ab90-f8c7-4759-8639-6c7de06ef1da service nova] Acquiring lock "refresh_cache-a397bce6-30c7-485d-81ab-88a8e021996c" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 773.289985] env[62627]: DEBUG oslo_concurrency.lockutils [req-a67f19ab-6d45-40d1-8243-19ad9e1eb02c req-f4e3ab90-f8c7-4759-8639-6c7de06ef1da service nova] Acquired lock "refresh_cache-a397bce6-30c7-485d-81ab-88a8e021996c" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 773.290159] env[62627]: DEBUG nova.network.neutron [req-a67f19ab-6d45-40d1-8243-19ad9e1eb02c req-f4e3ab90-f8c7-4759-8639-6c7de06ef1da service nova] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Refreshing network info cache for port e56ec5b3-09ba-4f3a-9238-ed7d6c1f7991 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 773.304377] env[62627]: DEBUG oslo_concurrency.lockutils [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 773.305839] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Acquiring lock "refresh_cache-f09c685d-9103-4c94-a44a-266aacc334f3" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 773.308942] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Acquired lock "refresh_cache-f09c685d-9103-4c94-a44a-266aacc334f3" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 773.308942] env[62627]: DEBUG nova.network.neutron [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: f09c685d-9103-4c94-a44a-266aacc334f3] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 773.308942] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquiring lock "6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 773.308942] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 773.309195] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Updating instance 'ab999825-4310-4fd8-81f0-6bb5dde4175b' progress to 83 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 773.310902] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194063, 'name': CreateVM_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.311093] env[62627]: WARNING oslo_vmware.common.loopingcall [-] task run outlasted interval by 0.165848 sec [ 773.322519] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194063, 'name': CreateVM_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.504443] env[62627]: DEBUG nova.network.neutron [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 15799515-ab7d-4e7c-a6e0-c740baf4f93a] Updating instance_info_cache with network_info: [{"id": "0f9d4335-0759-496e-8e45-fec3775be050", "address": "fa:16:3e:3f:3d:d5", "network": {"id": "8b46c92c-9d4a-48df-bc33-b07572f6b585", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1386412703-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65a2f5e400f049089ba7a7d8a4a77e34", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35e463c7-7d78-4d66-8efd-6127b1f3ee17", "external-id": "nsx-vlan-transportzone-175", "segmentation_id": 175, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f9d4335-07", "ovs_interfaceid": "0f9d4335-0759-496e-8e45-fec3775be050", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 773.648799] env[62627]: DEBUG nova.network.neutron [req-71bd539a-038c-477a-86e3-5ca0139fad67 req-30a7ca3a-4aec-4e88-bb60-fbe1fd8e20a8 service nova] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] Updated VIF entry in instance network info cache for port 87592606-744f-4632-a44b-69f4b8ef9acf. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 773.649177] env[62627]: DEBUG nova.network.neutron [req-71bd539a-038c-477a-86e3-5ca0139fad67 req-30a7ca3a-4aec-4e88-bb60-fbe1fd8e20a8 service nova] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] Updating instance_info_cache with network_info: [{"id": "87592606-744f-4632-a44b-69f4b8ef9acf", "address": "fa:16:3e:79:cb:ab", "network": {"id": "b9125493-d85a-4d39-9114-1d2259e90e38", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1324862174-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f831870fb0824e8e8353640736a11692", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "411f389f-4e4f-4450-891e-38944cac6135", "external-id": "nsx-vlan-transportzone-795", "segmentation_id": 795, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87592606-74", "ovs_interfaceid": "87592606-744f-4632-a44b-69f4b8ef9acf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 773.815238] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 773.820107] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-476c1f80-a235-4cd0-b06f-15b6a7843883 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.835311] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194063, 'name': CreateVM_Task, 'duration_secs': 1.475317} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.836573] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 773.836951] env[62627]: DEBUG oslo_vmware.api [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Waiting for the task: (returnval){ [ 773.836951] env[62627]: value = "task-2194064" [ 773.836951] env[62627]: _type = "Task" [ 773.836951] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.837593] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'delete_on_termination': True, 'attachment_id': '2428b311-93f7-4607-9204-bbaf2275a5d5', 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447551', 'volume_id': '51233852-f0e4-4bab-903c-b0575335358d', 'name': 'volume-51233852-f0e4-4bab-903c-b0575335358d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e5712962-ad8e-4cf2-aa47-275293609a13', 'attached_at': '', 'detached_at': '', 'volume_id': '51233852-f0e4-4bab-903c-b0575335358d', 'serial': '51233852-f0e4-4bab-903c-b0575335358d'}, 'boot_index': 0, 'mount_device': '/dev/sda', 'disk_bus': None, 'device_type': None, 'guest_format': None, 'volume_type': None}], 'swap': None} {{(pid=62627) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 773.837807] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] Root volume attach. Driver type: vmdk {{(pid=62627) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 773.840920] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f10bf0a0-335c-4c57-82df-875f2194866f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.852384] env[62627]: DEBUG oslo_vmware.api [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2194064, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.855709] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22b2410e-fce7-4ff2-8f51-8ec1774e946d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.863461] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-908dfd27-2bbf-460b-aa76-40ea4b123c8d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.880020] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-b3cc9f99-f92c-4e34-900b-201dfb46a219 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.886304] env[62627]: DEBUG oslo_vmware.api [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Waiting for the task: (returnval){ [ 773.886304] env[62627]: value = "task-2194065" [ 773.886304] env[62627]: _type = "Task" [ 773.886304] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.893590] env[62627]: DEBUG nova.network.neutron [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: f09c685d-9103-4c94-a44a-266aacc334f3] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 773.901294] env[62627]: DEBUG oslo_vmware.api [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Task: {'id': task-2194065, 'name': RelocateVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.957942] env[62627]: INFO nova.compute.manager [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Rebuilding instance [ 773.981114] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-328c69eb-30a8-40af-8cf0-7fd6a71f11b4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.995486] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69d6e2d6-d79f-4d37-b861-458538276061 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.035584] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Releasing lock "refresh_cache-15799515-ab7d-4e7c-a6e0-c740baf4f93a" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 774.035939] env[62627]: DEBUG nova.compute.manager [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 15799515-ab7d-4e7c-a6e0-c740baf4f93a] Instance network_info: |[{"id": "0f9d4335-0759-496e-8e45-fec3775be050", "address": "fa:16:3e:3f:3d:d5", "network": {"id": "8b46c92c-9d4a-48df-bc33-b07572f6b585", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1386412703-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65a2f5e400f049089ba7a7d8a4a77e34", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35e463c7-7d78-4d66-8efd-6127b1f3ee17", "external-id": "nsx-vlan-transportzone-175", "segmentation_id": 175, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f9d4335-07", "ovs_interfaceid": "0f9d4335-0759-496e-8e45-fec3775be050", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 774.043155] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 15799515-ab7d-4e7c-a6e0-c740baf4f93a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3f:3d:d5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '35e463c7-7d78-4d66-8efd-6127b1f3ee17', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0f9d4335-0759-496e-8e45-fec3775be050', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 774.050453] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Creating folder: Project (65a2f5e400f049089ba7a7d8a4a77e34). Parent ref: group-v447541. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 774.051254] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96961f89-56ff-4327-a72f-1ebb4f86b732 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.058615] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c7f01af7-6456-45c1-8125-2d0e13a5526a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.058615] env[62627]: DEBUG nova.compute.manager [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 774.059509] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-171b2b7e-47b6-4138-9c0e-ff734a8834cb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.065722] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52ab5a1d-86ed-47a2-b1ca-c4a2cc885212 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.074864] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Created folder: Project (65a2f5e400f049089ba7a7d8a4a77e34) in parent group-v447541. [ 774.076779] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Creating folder: Instances. Parent ref: group-v447591. 
{{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 774.076779] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-da18f7c1-c6fe-4419-b3d4-d71c25c4d071 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.086565] env[62627]: DEBUG nova.compute.provider_tree [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 774.097528] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Created folder: Instances in parent group-v447591. [ 774.097764] env[62627]: DEBUG oslo.service.loopingcall [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 774.097968] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 15799515-ab7d-4e7c-a6e0-c740baf4f93a] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 774.098727] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d09b8b8d-978c-4f41-8b3e-4c252a4ff04d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.122751] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 774.122751] env[62627]: value = "task-2194068" [ 774.122751] env[62627]: _type = "Task" [ 774.122751] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.132814] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194068, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.151646] env[62627]: DEBUG oslo_concurrency.lockutils [req-71bd539a-038c-477a-86e3-5ca0139fad67 req-30a7ca3a-4aec-4e88-bb60-fbe1fd8e20a8 service nova] Releasing lock "refresh_cache-e5712962-ad8e-4cf2-aa47-275293609a13" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 774.151911] env[62627]: DEBUG nova.compute.manager [req-71bd539a-038c-477a-86e3-5ca0139fad67 req-30a7ca3a-4aec-4e88-bb60-fbe1fd8e20a8 service nova] [instance: 15799515-ab7d-4e7c-a6e0-c740baf4f93a] Received event network-vif-plugged-0f9d4335-0759-496e-8e45-fec3775be050 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 774.152118] env[62627]: DEBUG oslo_concurrency.lockutils [req-71bd539a-038c-477a-86e3-5ca0139fad67 req-30a7ca3a-4aec-4e88-bb60-fbe1fd8e20a8 service nova] Acquiring lock "15799515-ab7d-4e7c-a6e0-c740baf4f93a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 774.152320] env[62627]: DEBUG oslo_concurrency.lockutils [req-71bd539a-038c-477a-86e3-5ca0139fad67 req-30a7ca3a-4aec-4e88-bb60-fbe1fd8e20a8 service nova] Lock "15799515-ab7d-4e7c-a6e0-c740baf4f93a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 774.152476] env[62627]: DEBUG oslo_concurrency.lockutils [req-71bd539a-038c-477a-86e3-5ca0139fad67 req-30a7ca3a-4aec-4e88-bb60-fbe1fd8e20a8 service nova] Lock "15799515-ab7d-4e7c-a6e0-c740baf4f93a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 774.152636] env[62627]: DEBUG nova.compute.manager [req-71bd539a-038c-477a-86e3-5ca0139fad67 req-30a7ca3a-4aec-4e88-bb60-fbe1fd8e20a8 service nova] [instance: 15799515-ab7d-4e7c-a6e0-c740baf4f93a] No waiting events found dispatching network-vif-plugged-0f9d4335-0759-496e-8e45-fec3775be050 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 774.152856] env[62627]: WARNING nova.compute.manager [req-71bd539a-038c-477a-86e3-5ca0139fad67 req-30a7ca3a-4aec-4e88-bb60-fbe1fd8e20a8 service nova] [instance: 15799515-ab7d-4e7c-a6e0-c740baf4f93a] Received unexpected event network-vif-plugged-0f9d4335-0759-496e-8e45-fec3775be050 for instance with vm_state building and task_state spawning. [ 774.152948] env[62627]: DEBUG nova.compute.manager [req-71bd539a-038c-477a-86e3-5ca0139fad67 req-30a7ca3a-4aec-4e88-bb60-fbe1fd8e20a8 service nova] [instance: 15799515-ab7d-4e7c-a6e0-c740baf4f93a] Received event network-changed-0f9d4335-0759-496e-8e45-fec3775be050 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 774.153398] env[62627]: DEBUG nova.compute.manager [req-71bd539a-038c-477a-86e3-5ca0139fad67 req-30a7ca3a-4aec-4e88-bb60-fbe1fd8e20a8 service nova] [instance: 15799515-ab7d-4e7c-a6e0-c740baf4f93a] Refreshing instance network info cache due to event network-changed-0f9d4335-0759-496e-8e45-fec3775be050. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 774.153630] env[62627]: DEBUG oslo_concurrency.lockutils [req-71bd539a-038c-477a-86e3-5ca0139fad67 req-30a7ca3a-4aec-4e88-bb60-fbe1fd8e20a8 service nova] Acquiring lock "refresh_cache-15799515-ab7d-4e7c-a6e0-c740baf4f93a" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 774.153765] env[62627]: DEBUG oslo_concurrency.lockutils [req-71bd539a-038c-477a-86e3-5ca0139fad67 req-30a7ca3a-4aec-4e88-bb60-fbe1fd8e20a8 service nova] Acquired lock "refresh_cache-15799515-ab7d-4e7c-a6e0-c740baf4f93a" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 774.153981] env[62627]: DEBUG nova.network.neutron [req-71bd539a-038c-477a-86e3-5ca0139fad67 req-30a7ca3a-4aec-4e88-bb60-fbe1fd8e20a8 service nova] [instance: 15799515-ab7d-4e7c-a6e0-c740baf4f93a] Refreshing network info cache for port 0f9d4335-0759-496e-8e45-fec3775be050 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 774.226519] env[62627]: DEBUG nova.network.neutron [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: f09c685d-9103-4c94-a44a-266aacc334f3] Updating instance_info_cache with network_info: [{"id": "6d227bba-fdfe-4963-87cf-9c8bc5e7d9ee", "address": "fa:16:3e:67:1f:93", "network": {"id": "8b46c92c-9d4a-48df-bc33-b07572f6b585", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1386412703-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65a2f5e400f049089ba7a7d8a4a77e34", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35e463c7-7d78-4d66-8efd-6127b1f3ee17", "external-id": "nsx-vlan-transportzone-175", "segmentation_id": 175, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d227bba-fd", "ovs_interfaceid": "6d227bba-fdfe-4963-87cf-9c8bc5e7d9ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 774.304624] env[62627]: DEBUG nova.network.neutron [req-a67f19ab-6d45-40d1-8243-19ad9e1eb02c req-f4e3ab90-f8c7-4759-8639-6c7de06ef1da service nova] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Updated VIF entry in instance network info cache for port e56ec5b3-09ba-4f3a-9238-ed7d6c1f7991. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 774.304976] env[62627]: DEBUG nova.network.neutron [req-a67f19ab-6d45-40d1-8243-19ad9e1eb02c req-f4e3ab90-f8c7-4759-8639-6c7de06ef1da service nova] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Updating instance_info_cache with network_info: [{"id": "e56ec5b3-09ba-4f3a-9238-ed7d6c1f7991", "address": "fa:16:3e:f6:d8:65", "network": {"id": "d8e684f9-7aa5-4513-a4df-afb52c5cb804", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-671089707-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.193", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d97a4a01696d4c4884df85a475c4bb7b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d88bb07-f93c-45ca-bce7-230cb1f33833", "external-id": "nsx-vlan-transportzone-387", "segmentation_id": 387, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape56ec5b3-09", "ovs_interfaceid": "e56ec5b3-09ba-4f3a-9238-ed7d6c1f7991", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 774.352163] env[62627]: DEBUG oslo_vmware.api [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2194064, 'name': PowerOnVM_Task, 'duration_secs': 0.472014} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.352565] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 774.352836] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-077da9ae-d23c-4f07-88d0-728d6d882d37 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Updating instance 'ab999825-4310-4fd8-81f0-6bb5dde4175b' progress to 100 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 774.399330] env[62627]: DEBUG oslo_vmware.api [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Task: {'id': task-2194065, 'name': RelocateVM_Task} progress is 20%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.589300] env[62627]: DEBUG nova.scheduler.client.report [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 774.633344] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194068, 'name': CreateVM_Task, 'duration_secs': 0.388832} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.633522] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 15799515-ab7d-4e7c-a6e0-c740baf4f93a] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 774.634223] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 774.634384] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 774.634701] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 774.634945] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3f2d07f2-ab26-40f6-a935-2fa82cc5f623 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.641134] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Waiting for the task: (returnval){ [ 774.641134] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5282d438-ac40-2a28-ef8e-a47a2de31b5d" [ 774.641134] env[62627]: _type = "Task" [ 774.641134] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.649675] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5282d438-ac40-2a28-ef8e-a47a2de31b5d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.732811] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Releasing lock "refresh_cache-f09c685d-9103-4c94-a44a-266aacc334f3" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 774.732811] env[62627]: DEBUG nova.compute.manager [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: f09c685d-9103-4c94-a44a-266aacc334f3] Instance network_info: |[{"id": "6d227bba-fdfe-4963-87cf-9c8bc5e7d9ee", "address": "fa:16:3e:67:1f:93", "network": {"id": "8b46c92c-9d4a-48df-bc33-b07572f6b585", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1386412703-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65a2f5e400f049089ba7a7d8a4a77e34", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35e463c7-7d78-4d66-8efd-6127b1f3ee17", "external-id": "nsx-vlan-transportzone-175", "segmentation_id": 175, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d227bba-fd", "ovs_interfaceid": "6d227bba-fdfe-4963-87cf-9c8bc5e7d9ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 774.733235] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: f09c685d-9103-4c94-a44a-266aacc334f3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:67:1f:93', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '35e463c7-7d78-4d66-8efd-6127b1f3ee17', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6d227bba-fdfe-4963-87cf-9c8bc5e7d9ee', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 774.739250] env[62627]: DEBUG oslo.service.loopingcall [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 774.739901] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f09c685d-9103-4c94-a44a-266aacc334f3] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 774.743404] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cac2a8fa-9bb4-419a-9a27-10e482d98db6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.773266] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 774.773266] env[62627]: value = "task-2194069" [ 774.773266] env[62627]: _type = "Task" [ 774.773266] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.782673] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194069, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.808410] env[62627]: DEBUG oslo_concurrency.lockutils [req-a67f19ab-6d45-40d1-8243-19ad9e1eb02c req-f4e3ab90-f8c7-4759-8639-6c7de06ef1da service nova] Releasing lock "refresh_cache-a397bce6-30c7-485d-81ab-88a8e021996c" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 774.838049] env[62627]: DEBUG nova.compute.manager [req-627e87b6-c3f0-451e-8b9b-90b70442cc3d req-e27260ef-b3df-40f1-851d-cafbb314b322 service nova] [instance: f09c685d-9103-4c94-a44a-266aacc334f3] Received event network-vif-plugged-6d227bba-fdfe-4963-87cf-9c8bc5e7d9ee {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 774.839062] env[62627]: DEBUG oslo_concurrency.lockutils [req-627e87b6-c3f0-451e-8b9b-90b70442cc3d req-e27260ef-b3df-40f1-851d-cafbb314b322 service nova] Acquiring lock "f09c685d-9103-4c94-a44a-266aacc334f3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 774.839928] env[62627]: DEBUG oslo_concurrency.lockutils [req-627e87b6-c3f0-451e-8b9b-90b70442cc3d req-e27260ef-b3df-40f1-851d-cafbb314b322 service nova] Lock "f09c685d-9103-4c94-a44a-266aacc334f3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 774.840414] env[62627]: DEBUG oslo_concurrency.lockutils [req-627e87b6-c3f0-451e-8b9b-90b70442cc3d req-e27260ef-b3df-40f1-851d-cafbb314b322 service nova] Lock "f09c685d-9103-4c94-a44a-266aacc334f3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 774.842895] env[62627]: DEBUG nova.compute.manager [req-627e87b6-c3f0-451e-8b9b-90b70442cc3d req-e27260ef-b3df-40f1-851d-cafbb314b322 service nova] [instance: f09c685d-9103-4c94-a44a-266aacc334f3] No waiting events found dispatching network-vif-plugged-6d227bba-fdfe-4963-87cf-9c8bc5e7d9ee {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 774.842895] env[62627]: WARNING nova.compute.manager [req-627e87b6-c3f0-451e-8b9b-90b70442cc3d req-e27260ef-b3df-40f1-851d-cafbb314b322 service nova] [instance: 
f09c685d-9103-4c94-a44a-266aacc334f3] Received unexpected event network-vif-plugged-6d227bba-fdfe-4963-87cf-9c8bc5e7d9ee for instance with vm_state building and task_state spawning. [ 774.842895] env[62627]: DEBUG nova.compute.manager [req-627e87b6-c3f0-451e-8b9b-90b70442cc3d req-e27260ef-b3df-40f1-851d-cafbb314b322 service nova] [instance: f09c685d-9103-4c94-a44a-266aacc334f3] Received event network-changed-6d227bba-fdfe-4963-87cf-9c8bc5e7d9ee {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 774.842895] env[62627]: DEBUG nova.compute.manager [req-627e87b6-c3f0-451e-8b9b-90b70442cc3d req-e27260ef-b3df-40f1-851d-cafbb314b322 service nova] [instance: f09c685d-9103-4c94-a44a-266aacc334f3] Refreshing instance network info cache due to event network-changed-6d227bba-fdfe-4963-87cf-9c8bc5e7d9ee. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 774.842895] env[62627]: DEBUG oslo_concurrency.lockutils [req-627e87b6-c3f0-451e-8b9b-90b70442cc3d req-e27260ef-b3df-40f1-851d-cafbb314b322 service nova] Acquiring lock "refresh_cache-f09c685d-9103-4c94-a44a-266aacc334f3" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 774.843491] env[62627]: DEBUG oslo_concurrency.lockutils [req-627e87b6-c3f0-451e-8b9b-90b70442cc3d req-e27260ef-b3df-40f1-851d-cafbb314b322 service nova] Acquired lock "refresh_cache-f09c685d-9103-4c94-a44a-266aacc334f3" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 774.843491] env[62627]: DEBUG nova.network.neutron [req-627e87b6-c3f0-451e-8b9b-90b70442cc3d req-e27260ef-b3df-40f1-851d-cafbb314b322 service nova] [instance: f09c685d-9103-4c94-a44a-266aacc334f3] Refreshing network info cache for port 6d227bba-fdfe-4963-87cf-9c8bc5e7d9ee {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 774.899254] env[62627]: DEBUG oslo_vmware.api [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Task: {'id': task-2194065, 'name': RelocateVM_Task, 'duration_secs': 0.56701} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.899923] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] Volume attach. 
Driver type: vmdk {{(pid=62627) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 774.900266] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447551', 'volume_id': '51233852-f0e4-4bab-903c-b0575335358d', 'name': 'volume-51233852-f0e4-4bab-903c-b0575335358d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e5712962-ad8e-4cf2-aa47-275293609a13', 'attached_at': '', 'detached_at': '', 'volume_id': '51233852-f0e4-4bab-903c-b0575335358d', 'serial': '51233852-f0e4-4bab-903c-b0575335358d'} {{(pid=62627) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 774.901137] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d222cf72-ce36-4ed7-8a8b-f9fa1ebbe632 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.920743] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68b5878f-b0f2-4234-b0a2-c71732a2ab60 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.947379] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] Reconfiguring VM instance instance-00000011 to attach disk [datastore2] volume-51233852-f0e4-4bab-903c-b0575335358d/volume-51233852-f0e4-4bab-903c-b0575335358d.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 774.950413] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-17dcdc46-1c1c-473e-abcc-4172966cc7a3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.973376] env[62627]: DEBUG oslo_vmware.api [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Waiting for the task: (returnval){ [ 774.973376] env[62627]: value = "task-2194070" [ 774.973376] env[62627]: _type = "Task" [ 774.973376] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.983905] env[62627]: DEBUG oslo_vmware.api [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Task: {'id': task-2194070, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.021284] env[62627]: DEBUG nova.network.neutron [req-71bd539a-038c-477a-86e3-5ca0139fad67 req-30a7ca3a-4aec-4e88-bb60-fbe1fd8e20a8 service nova] [instance: 15799515-ab7d-4e7c-a6e0-c740baf4f93a] Updated VIF entry in instance network info cache for port 0f9d4335-0759-496e-8e45-fec3775be050. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 775.021284] env[62627]: DEBUG nova.network.neutron [req-71bd539a-038c-477a-86e3-5ca0139fad67 req-30a7ca3a-4aec-4e88-bb60-fbe1fd8e20a8 service nova] [instance: 15799515-ab7d-4e7c-a6e0-c740baf4f93a] Updating instance_info_cache with network_info: [{"id": "0f9d4335-0759-496e-8e45-fec3775be050", "address": "fa:16:3e:3f:3d:d5", "network": {"id": "8b46c92c-9d4a-48df-bc33-b07572f6b585", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1386412703-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65a2f5e400f049089ba7a7d8a4a77e34", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35e463c7-7d78-4d66-8efd-6127b1f3ee17", "external-id": "nsx-vlan-transportzone-175", "segmentation_id": 175, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f9d4335-07", "ovs_interfaceid": "0f9d4335-0759-496e-8e45-fec3775be050", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 775.080348] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 775.081441] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-44c677d0-f8a5-4927-b81d-e351b5a61c3d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.091351] env[62627]: DEBUG oslo_vmware.api [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] Waiting for the task: (returnval){ [ 775.091351] env[62627]: value = "task-2194071" [ 775.091351] env[62627]: _type = "Task" [ 775.091351] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.097448] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.132s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 775.097448] env[62627]: DEBUG nova.compute.manager [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 76fb58dc-df20-464b-bda0-dcf09d69c718] Start building networks asynchronously for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 775.102560] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.066s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 775.105109] env[62627]: INFO nova.compute.claims [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 775.114361] env[62627]: DEBUG oslo_vmware.api [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] Task: {'id': task-2194071, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.155219] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5282d438-ac40-2a28-ef8e-a47a2de31b5d, 'name': SearchDatastore_Task, 'duration_secs': 0.013595} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.155219] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 775.155219] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 15799515-ab7d-4e7c-a6e0-c740baf4f93a] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 775.155219] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 775.155413] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 775.155413] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 
tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 775.156186] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1aeb5687-2f44-4f57-8065-10b711d6b3c5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.167573] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 775.167840] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 775.168771] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-29ead19e-d51f-4add-bf11-ac2d1e14e3e8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.177463] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Waiting for the task: (returnval){ [ 775.177463] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d6701b-2e2c-8bc9-b5fe-3040793044d7" [ 775.177463] env[62627]: _type = "Task" [ 775.177463] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.188984] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d6701b-2e2c-8bc9-b5fe-3040793044d7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.285711] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194069, 'name': CreateVM_Task, 'duration_secs': 0.434461} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.285900] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f09c685d-9103-4c94-a44a-266aacc334f3] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 775.286600] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 775.287250] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 775.287250] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 775.287771] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38b718f3-cc05-4cfe-bf08-0c3ad7f95398 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.293052] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Waiting for the task: (returnval){ [ 775.293052] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]520112c9-ffea-2744-b707-7e1d6ea9e441" [ 775.293052] env[62627]: _type = "Task" [ 775.293052] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.302335] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]520112c9-ffea-2744-b707-7e1d6ea9e441, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.489138] env[62627]: DEBUG oslo_vmware.api [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Task: {'id': task-2194070, 'name': ReconfigVM_Task, 'duration_secs': 0.420272} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.492327] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] Reconfigured VM instance instance-00000011 to attach disk [datastore2] volume-51233852-f0e4-4bab-903c-b0575335358d/volume-51233852-f0e4-4bab-903c-b0575335358d.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 775.499031] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c474304d-0a03-47ca-927c-eed76e845654 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.516415] env[62627]: DEBUG oslo_vmware.api [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Waiting for the task: (returnval){ [ 775.516415] env[62627]: value = "task-2194072" [ 775.516415] env[62627]: _type = "Task" [ 775.516415] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.522036] env[62627]: DEBUG oslo_concurrency.lockutils [req-71bd539a-038c-477a-86e3-5ca0139fad67 req-30a7ca3a-4aec-4e88-bb60-fbe1fd8e20a8 service nova] Releasing lock "refresh_cache-15799515-ab7d-4e7c-a6e0-c740baf4f93a" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 775.528543] env[62627]: DEBUG oslo_vmware.api [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Task: {'id': task-2194072, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.604803] env[62627]: DEBUG nova.compute.utils [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 775.606280] env[62627]: DEBUG oslo_vmware.api [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] Task: {'id': task-2194071, 'name': PowerOffVM_Task, 'duration_secs': 0.136335} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.609213] env[62627]: DEBUG nova.compute.manager [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 76fb58dc-df20-464b-bda0-dcf09d69c718] Allocating IP information in the background. 
{{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 775.609213] env[62627]: DEBUG nova.network.neutron [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 76fb58dc-df20-464b-bda0-dcf09d69c718] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 775.609213] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 775.609213] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 775.612234] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48632159-a2ce-4d64-a53e-68b9e65b57bc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.620981] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 775.620981] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c526535e-c140-4b60-a0f5-fea4e0146ad4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.660681] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 775.660938] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 775.661157] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] Deleting the datastore file [datastore2] 63e884c4-2cc1-431d-8a7f-1c4259b29e1a {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 775.661426] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-af75d663-659d-4535-aede-5cb826eec95e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.680057] env[62627]: DEBUG oslo_vmware.api [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 
tempest-ServersAdmin275Test-1891970148-project-admin] Waiting for the task: (returnval){ [ 775.680057] env[62627]: value = "task-2194074" [ 775.680057] env[62627]: _type = "Task" [ 775.680057] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.697647] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d6701b-2e2c-8bc9-b5fe-3040793044d7, 'name': SearchDatastore_Task, 'duration_secs': 0.011401} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.697647] env[62627]: DEBUG oslo_vmware.api [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] Task: {'id': task-2194074, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.697647] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1cd60c24-0134-4733-9976-4b8821e036a7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.703297] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Waiting for the task: (returnval){ [ 775.703297] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52387ac0-d775-0df1-6c8b-3a6353064409" [ 775.703297] env[62627]: _type = "Task" [ 775.703297] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.704721] env[62627]: DEBUG nova.policy [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'eaa47bdec0e04f67a5bba36271ffeb83', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '65a2f5e400f049089ba7a7d8a4a77e34', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 775.716347] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52387ac0-d775-0df1-6c8b-3a6353064409, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.727926] env[62627]: DEBUG nova.network.neutron [req-627e87b6-c3f0-451e-8b9b-90b70442cc3d req-e27260ef-b3df-40f1-851d-cafbb314b322 service nova] [instance: f09c685d-9103-4c94-a44a-266aacc334f3] Updated VIF entry in instance network info cache for port 6d227bba-fdfe-4963-87cf-9c8bc5e7d9ee. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 775.728387] env[62627]: DEBUG nova.network.neutron [req-627e87b6-c3f0-451e-8b9b-90b70442cc3d req-e27260ef-b3df-40f1-851d-cafbb314b322 service nova] [instance: f09c685d-9103-4c94-a44a-266aacc334f3] Updating instance_info_cache with network_info: [{"id": "6d227bba-fdfe-4963-87cf-9c8bc5e7d9ee", "address": "fa:16:3e:67:1f:93", "network": {"id": "8b46c92c-9d4a-48df-bc33-b07572f6b585", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1386412703-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65a2f5e400f049089ba7a7d8a4a77e34", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35e463c7-7d78-4d66-8efd-6127b1f3ee17", "external-id": "nsx-vlan-transportzone-175", "segmentation_id": 175, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d227bba-fd", "ovs_interfaceid": "6d227bba-fdfe-4963-87cf-9c8bc5e7d9ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 775.807040] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]520112c9-ffea-2744-b707-7e1d6ea9e441, 'name': SearchDatastore_Task, 'duration_secs': 0.011028} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.807407] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 775.807724] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: f09c685d-9103-4c94-a44a-266aacc334f3] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 775.807979] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 775.982495] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e33b7f68-a065-4361-afff-ba33ca167387 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Acquiring lock "f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 775.982996] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e33b7f68-a065-4361-afff-ba33ca167387 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Lock "f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 775.983276] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e33b7f68-a065-4361-afff-ba33ca167387 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Acquiring lock "f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 775.983479] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e33b7f68-a065-4361-afff-ba33ca167387 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Lock "f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 775.983649] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e33b7f68-a065-4361-afff-ba33ca167387 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Lock "f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 775.986114] env[62627]: INFO nova.compute.manager [None req-e33b7f68-a065-4361-afff-ba33ca167387 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7] Terminating instance [ 776.030312] env[62627]: DEBUG oslo_vmware.api [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Task: {'id': task-2194072, 'name': ReconfigVM_Task, 'duration_secs': 0.243871} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.032137] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447551', 'volume_id': '51233852-f0e4-4bab-903c-b0575335358d', 'name': 'volume-51233852-f0e4-4bab-903c-b0575335358d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e5712962-ad8e-4cf2-aa47-275293609a13', 'attached_at': '', 'detached_at': '', 'volume_id': '51233852-f0e4-4bab-903c-b0575335358d', 'serial': '51233852-f0e4-4bab-903c-b0575335358d'} {{(pid=62627) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 776.032137] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-040ae309-ef30-45d2-89ff-1f291480e997 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.041617] env[62627]: DEBUG oslo_vmware.api [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Waiting for the task: (returnval){ [ 776.041617] env[62627]: value = "task-2194075" [ 776.041617] env[62627]: _type = "Task" [ 776.041617] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.051705] env[62627]: DEBUG oslo_vmware.api [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Task: {'id': task-2194075, 'name': Rename_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.108105] env[62627]: DEBUG nova.compute.manager [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 76fb58dc-df20-464b-bda0-dcf09d69c718] Start building block device mappings for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 776.112983] env[62627]: DEBUG nova.network.neutron [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 76fb58dc-df20-464b-bda0-dcf09d69c718] Successfully created port: ffc6e4e3-3dcd-43a4-8fa4-dc9c8ffa3e2b {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 776.198249] env[62627]: DEBUG oslo_vmware.api [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] Task: {'id': task-2194074, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.111097} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.202013] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 776.202635] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 776.202635] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 776.217956] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52387ac0-d775-0df1-6c8b-3a6353064409, 'name': SearchDatastore_Task, 'duration_secs': 0.021535} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.218242] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 776.218494] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore1] 15799515-ab7d-4e7c-a6e0-c740baf4f93a/15799515-ab7d-4e7c-a6e0-c740baf4f93a.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 776.218768] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 776.218945] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 776.219161] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ede3262b-f7da-42dc-827b-8771c571a46e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.221273] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-13969f53-5ebb-4f16-b380-2154c78afef6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.234822] env[62627]: DEBUG oslo_concurrency.lockutils [req-627e87b6-c3f0-451e-8b9b-90b70442cc3d req-e27260ef-b3df-40f1-851d-cafbb314b322 service nova] Releasing lock "refresh_cache-f09c685d-9103-4c94-a44a-266aacc334f3" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 776.236628] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Waiting for the task: (returnval){ [ 776.236628] env[62627]: value = "task-2194076" [ 776.236628] env[62627]: _type = "Task" [ 776.236628] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.239740] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 776.239740] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 776.245522] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-45d9b255-32b4-43b6-80d0-5417c8fd7a61 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.250403] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Waiting for the task: (returnval){ [ 776.250403] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]525a1d45-60ff-644f-9e8c-ee86b629e088" [ 776.250403] env[62627]: _type = "Task" [ 776.250403] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.253755] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': task-2194076, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.266272] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]525a1d45-60ff-644f-9e8c-ee86b629e088, 'name': SearchDatastore_Task, 'duration_secs': 0.010488} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.267138] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-36bd2dcc-f90a-4b16-ba5c-ae64f5669261 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.275985] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Waiting for the task: (returnval){ [ 776.275985] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52181a0f-ed12-fcff-1bb7-28894473f89b" [ 776.275985] env[62627]: _type = "Task" [ 776.275985] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.285980] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52181a0f-ed12-fcff-1bb7-28894473f89b, 'name': SearchDatastore_Task, 'duration_secs': 0.009847} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.289831] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 776.290615] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore1] f09c685d-9103-4c94-a44a-266aacc334f3/f09c685d-9103-4c94-a44a-266aacc334f3.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 776.290809] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ebd6ca16-27cd-4d33-906b-53ccd5596847 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.299758] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Waiting for the task: (returnval){ [ 776.299758] env[62627]: value = "task-2194077" [ 776.299758] env[62627]: _type = "Task" [ 776.299758] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.311743] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': task-2194077, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.489999] env[62627]: DEBUG nova.compute.manager [None req-e33b7f68-a065-4361-afff-ba33ca167387 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7] Start destroying the instance on the hypervisor. 
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 776.490454] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e33b7f68-a065-4361-afff-ba33ca167387 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 776.491114] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a96bd3d0-6c5a-43b7-864b-d8659e9c9e83 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.501478] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e33b7f68-a065-4361-afff-ba33ca167387 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 776.503810] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9b7459df-6d25-4157-b1de-ab12e6298953 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.562305] env[62627]: DEBUG oslo_vmware.api [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Task: {'id': task-2194075, 'name': Rename_Task, 'duration_secs': 0.232227} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.562758] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 776.564638] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-90608b86-abff-4db9-b78a-4127b617f015 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.578237] env[62627]: DEBUG oslo_vmware.api [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Waiting for the task: (returnval){ [ 776.578237] env[62627]: value = "task-2194079" [ 776.578237] env[62627]: _type = "Task" [ 776.578237] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.594683] env[62627]: DEBUG oslo_vmware.api [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Task: {'id': task-2194079, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.594977] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e33b7f68-a065-4361-afff-ba33ca167387 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 776.595871] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e33b7f68-a065-4361-afff-ba33ca167387 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 776.595871] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-e33b7f68-a065-4361-afff-ba33ca167387 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Deleting the datastore file [datastore2] f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 776.595871] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-95aa0059-8477-4da6-802e-0e0733b1ab2d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.609020] env[62627]: DEBUG oslo_vmware.api [None req-e33b7f68-a065-4361-afff-ba33ca167387 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Waiting for the task: (returnval){ [ 776.609020] env[62627]: value = "task-2194080" [ 776.609020] env[62627]: _type = "Task" [ 776.609020] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.630832] env[62627]: DEBUG oslo_vmware.api [None req-e33b7f68-a065-4361-afff-ba33ca167387 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Task: {'id': task-2194080, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.662824] env[62627]: DEBUG oslo_concurrency.lockutils [None req-fd2d46c3-107d-480c-9d9b-a5a305c61bfc tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Acquiring lock "ab999825-4310-4fd8-81f0-6bb5dde4175b" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 776.663602] env[62627]: DEBUG oslo_concurrency.lockutils [None req-fd2d46c3-107d-480c-9d9b-a5a305c61bfc tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Lock "ab999825-4310-4fd8-81f0-6bb5dde4175b" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 776.663602] env[62627]: DEBUG nova.compute.manager [None req-fd2d46c3-107d-480c-9d9b-a5a305c61bfc tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Going to confirm migration 1 {{(pid=62627) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 776.756549] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': task-2194076, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.517145} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.757203] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore1] 15799515-ab7d-4e7c-a6e0-c740baf4f93a/15799515-ab7d-4e7c-a6e0-c740baf4f93a.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 776.757445] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 15799515-ab7d-4e7c-a6e0-c740baf4f93a] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 776.757712] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f2147567-fcd9-4bd9-bec1-8e7e413af385 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.771490] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Waiting for the task: (returnval){ [ 776.771490] env[62627]: value = "task-2194081" [ 776.771490] env[62627]: _type = "Task" [ 776.771490] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.789851] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': task-2194081, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.819158] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': task-2194077, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.876055] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aae9c056-3afa-4bf9-b110-ec9b1e629094 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.889794] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0a2d8d8-4d64-4adb-a35f-0b0e029a908d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.929470] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7c69598-e78e-469d-8ac6-39adcccce431 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.944428] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e331bbf-b57c-4870-94c3-944f223f5bcb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.965614] env[62627]: DEBUG nova.compute.provider_tree [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Updating inventory in ProviderTree for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 777.089820] env[62627]: DEBUG oslo_vmware.api [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Task: {'id': task-2194079, 'name': PowerOnVM_Task} progress is 19%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.123405] env[62627]: DEBUG oslo_vmware.api [None req-e33b7f68-a065-4361-afff-ba33ca167387 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Task: {'id': task-2194080, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.413482} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.123677] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-e33b7f68-a065-4361-afff-ba33ca167387 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 777.123860] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e33b7f68-a065-4361-afff-ba33ca167387 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 777.124042] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e33b7f68-a065-4361-afff-ba33ca167387 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 777.124217] env[62627]: INFO nova.compute.manager [None req-e33b7f68-a065-4361-afff-ba33ca167387 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7] Took 0.63 seconds to destroy the instance on the hypervisor. [ 777.124484] env[62627]: DEBUG oslo.service.loopingcall [None req-e33b7f68-a065-4361-afff-ba33ca167387 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 777.124669] env[62627]: DEBUG nova.compute.manager [-] [instance: f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 777.124774] env[62627]: DEBUG nova.network.neutron [-] [instance: f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 777.134934] env[62627]: DEBUG nova.compute.manager [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 76fb58dc-df20-464b-bda0-dcf09d69c718] Start spawning the instance on the hypervisor. 
{{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 777.162886] env[62627]: DEBUG nova.virt.hardware [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 777.163180] env[62627]: DEBUG nova.virt.hardware [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 777.163339] env[62627]: DEBUG nova.virt.hardware [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 777.163539] env[62627]: DEBUG nova.virt.hardware [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 777.163664] env[62627]: DEBUG nova.virt.hardware [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 777.163806] env[62627]: DEBUG nova.virt.hardware [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 777.164069] env[62627]: DEBUG nova.virt.hardware [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 777.164827] env[62627]: DEBUG nova.virt.hardware [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 777.165060] env[62627]: DEBUG nova.virt.hardware [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 777.165766] env[62627]: DEBUG nova.virt.hardware [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 777.166069] env[62627]: DEBUG nova.virt.hardware [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 777.166992] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-417a1180-5bcc-4380-8767-44e8a36efdad {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.181976] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0731f0fb-2f6c-4672-b3c6-4661dfa338ab {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.235655] env[62627]: DEBUG nova.virt.hardware [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 777.236131] env[62627]: DEBUG nova.virt.hardware [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 777.236428] env[62627]: DEBUG nova.virt.hardware [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 777.236747] env[62627]: DEBUG nova.virt.hardware [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] Flavor pref 0:0:0 {{(pid=62627) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 777.237112] env[62627]: DEBUG nova.virt.hardware [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 777.237455] env[62627]: DEBUG nova.virt.hardware [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 777.237776] env[62627]: DEBUG nova.virt.hardware [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 777.238079] env[62627]: DEBUG nova.virt.hardware [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 777.238365] env[62627]: DEBUG nova.virt.hardware [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 777.238662] env[62627]: DEBUG nova.virt.hardware [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 777.238950] env[62627]: DEBUG nova.virt.hardware [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 777.240251] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aa19374-abeb-4622-9d8d-442148adcce5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.254030] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f10c47d4-533f-43ff-adc3-75fd923f3144 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.266386] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Instance VIF info [] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 777.272256] env[62627]: DEBUG oslo.service.loopingcall [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 
tempest-ServersAdmin275Test-1891970148-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 777.273622] env[62627]: DEBUG oslo_concurrency.lockutils [None req-fd2d46c3-107d-480c-9d9b-a5a305c61bfc tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Acquiring lock "refresh_cache-ab999825-4310-4fd8-81f0-6bb5dde4175b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 777.273930] env[62627]: DEBUG oslo_concurrency.lockutils [None req-fd2d46c3-107d-480c-9d9b-a5a305c61bfc tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Acquired lock "refresh_cache-ab999825-4310-4fd8-81f0-6bb5dde4175b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 777.274225] env[62627]: DEBUG nova.network.neutron [None req-fd2d46c3-107d-480c-9d9b-a5a305c61bfc tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 777.274507] env[62627]: DEBUG nova.objects.instance [None req-fd2d46c3-107d-480c-9d9b-a5a305c61bfc tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Lazy-loading 'info_cache' on Instance uuid ab999825-4310-4fd8-81f0-6bb5dde4175b {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 777.275956] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 777.279176] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-35946d04-86a5-4ab0-97ee-c1556bc12b01 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.298894] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': task-2194081, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.10779} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.300618] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 15799515-ab7d-4e7c-a6e0-c740baf4f93a] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 777.301075] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 777.301075] env[62627]: value = "task-2194082" [ 777.301075] env[62627]: _type = "Task" [ 777.301075] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.301980] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9322d47-fe9e-4917-88fc-ec458376ffdc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.335535] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 15799515-ab7d-4e7c-a6e0-c740baf4f93a] Reconfiguring VM instance instance-00000012 to attach disk [datastore1] 15799515-ab7d-4e7c-a6e0-c740baf4f93a/15799515-ab7d-4e7c-a6e0-c740baf4f93a.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 777.342212] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8aa5b698-6b99-4508-aeba-a3179d604d28 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.356225] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': task-2194077, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.801354} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.356437] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194082, 'name': CreateVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.356694] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore1] f09c685d-9103-4c94-a44a-266aacc334f3/f09c685d-9103-4c94-a44a-266aacc334f3.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 777.356900] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: f09c685d-9103-4c94-a44a-266aacc334f3] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 777.357589] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6a2c00aa-c690-4d6e-8fcb-515fa582eb83 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.364284] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Waiting for the task: (returnval){ [ 777.364284] env[62627]: value = "task-2194083" [ 777.364284] env[62627]: _type = "Task" [ 777.364284] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.366246] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Waiting for the task: (returnval){ [ 777.366246] env[62627]: value = "task-2194084" [ 777.366246] env[62627]: _type = "Task" [ 777.366246] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.380580] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': task-2194084, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.384262] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': task-2194083, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.499233] env[62627]: ERROR nova.scheduler.client.report [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] [req-b9a542e7-3fea-432c-bc9d-d6493a19a839] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c17e7f32-68d8-4e2f-8979-15d701de02f7. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-b9a542e7-3fea-432c-bc9d-d6493a19a839"}]} [ 777.518017] env[62627]: DEBUG nova.scheduler.client.report [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Refreshing inventories for resource provider c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 777.543056] env[62627]: DEBUG nova.scheduler.client.report [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Updating ProviderTree inventory for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 777.543056] env[62627]: DEBUG nova.compute.provider_tree [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Updating inventory in ProviderTree for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 777.557368] env[62627]: DEBUG nova.scheduler.client.report [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Refreshing aggregate associations for resource provider c17e7f32-68d8-4e2f-8979-15d701de02f7, aggregates: None {{(pid=62627) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 777.578609] env[62627]: DEBUG nova.scheduler.client.report [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Refreshing trait associations for resource provider c17e7f32-68d8-4e2f-8979-15d701de02f7, traits: COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62627) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 777.594327] env[62627]: DEBUG oslo_vmware.api [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Task: {'id': task-2194079, 'name': PowerOnVM_Task} progress is 91%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.750076] env[62627]: DEBUG nova.compute.manager [req-99d26bd4-28b4-41c1-9110-c7e048205093 req-ff6441ef-045c-49c7-942e-a48ed0212daa service nova] [instance: f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7] Received event network-vif-deleted-6de87f0c-6ba0-47bb-8c42-aa150e7855fe {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 777.750305] env[62627]: INFO nova.compute.manager [req-99d26bd4-28b4-41c1-9110-c7e048205093 req-ff6441ef-045c-49c7-942e-a48ed0212daa service nova] [instance: f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7] Neutron deleted interface 6de87f0c-6ba0-47bb-8c42-aa150e7855fe; detaching it from the instance and deleting it from the info cache [ 777.750525] env[62627]: DEBUG nova.network.neutron [req-99d26bd4-28b4-41c1-9110-c7e048205093 req-ff6441ef-045c-49c7-942e-a48ed0212daa service nova] [instance: f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 777.825714] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194082, 'name': CreateVM_Task, 'duration_secs': 0.391559} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.826164] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 777.826375] env[62627]: DEBUG oslo_concurrency.lockutils [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 777.826625] env[62627]: DEBUG oslo_concurrency.lockutils [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 777.826942] env[62627]: DEBUG oslo_concurrency.lockutils [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 777.827215] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-906b3be0-509d-45f2-bc5f-0a8c0b7f2702 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.834405] env[62627]: DEBUG oslo_vmware.api [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] Waiting for the task: (returnval){ [ 777.834405] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d8bc94-c9bd-58e8-a46c-a94848a0b039" [ 777.834405] env[62627]: _type = "Task" [ 777.834405] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.845402] env[62627]: DEBUG oslo_vmware.api [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d8bc94-c9bd-58e8-a46c-a94848a0b039, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.884358] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': task-2194083, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.897988] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': task-2194084, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.098918} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.898249] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: f09c685d-9103-4c94-a44a-266aacc334f3] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 777.901027] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-100a717d-20a3-46ed-855a-a9dbefff955e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.925648] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: f09c685d-9103-4c94-a44a-266aacc334f3] Reconfiguring VM instance instance-00000013 to attach disk [datastore1] f09c685d-9103-4c94-a44a-266aacc334f3/f09c685d-9103-4c94-a44a-266aacc334f3.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 777.928845] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-edea6e4a-3aab-4f0e-ad4c-d4eda8e1b492 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.952136] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Waiting for the task: (returnval){ [ 777.952136] env[62627]: value = "task-2194085" [ 777.952136] env[62627]: _type = "Task" [ 777.952136] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.955214] env[62627]: DEBUG nova.network.neutron [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 76fb58dc-df20-464b-bda0-dcf09d69c718] Successfully updated port: ffc6e4e3-3dcd-43a4-8fa4-dc9c8ffa3e2b {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 777.964527] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': task-2194085, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.046319] env[62627]: DEBUG nova.network.neutron [-] [instance: f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 778.091972] env[62627]: DEBUG oslo_vmware.api [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Task: {'id': task-2194079, 'name': PowerOnVM_Task, 'duration_secs': 1.044262} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.092286] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 778.092497] env[62627]: INFO nova.compute.manager [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] Took 10.15 seconds to spawn the instance on the hypervisor. 
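The ERROR entry above ("Got 409 ... placement.concurrent_update") is the placement service rejecting an inventory update because the resource-provider generation sent with the PUT was stale; the report client then refreshes its cached inventories, aggregates and traits and re-checks whether anything actually changed (the later "Inventory has not changed" lines). Below is a minimal, hypothetical sketch of that refresh-and-retry pattern against the placement REST API; the endpoint URL, token and microversion header are illustrative assumptions, not values taken from this log, and this is not Nova's actual report-client code.

```python
# Hypothetical sketch: retry an inventory PUT against placement when it
# answers 409 placement.concurrent_update (stale resource-provider generation).
import requests

PLACEMENT = "http://placement.example.test"    # assumed endpoint
HEADERS = {
    "X-Auth-Token": "<token>",                 # assumed auth token
    "OpenStack-API-Version": "placement 1.26", # microversion is an assumption
}

def put_inventory_with_retry(provider_uuid, inventories, retries=3):
    url = f"{PLACEMENT}/resource_providers/{provider_uuid}/inventories"
    for _ in range(retries):
        # Refresh the provider generation before every attempt.
        current = requests.get(url, headers=HEADERS).json()
        body = {
            "resource_provider_generation": current["resource_provider_generation"],
            "inventories": inventories,
        }
        resp = requests.put(url, json=body, headers=HEADERS)
        if resp.status_code != 409:
            resp.raise_for_status()
            return resp.json()
        # 409: someone else bumped the generation; re-read it and try again.
    raise RuntimeError("gave up after repeated generation conflicts")
```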
[ 778.092676] env[62627]: DEBUG nova.compute.manager [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 778.097019] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a184b43b-d6e9-49b7-9bbd-a5b322b14ae2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.238922] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14fa8f98-a893-45a6-9250-77dd1539ae84 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.247210] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d833a41a-4fe5-47d3-8ea7-15c5938f7b7f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.253233] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-34436aea-6ba6-4a36-9f8f-74661f954c9c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.286769] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca25dc1d-0fb4-4ac0-97b1-802858eba8f9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.295568] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12026538-ce7d-4454-90bb-0279c97cd79e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.315308] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af5e33d5-9cc7-439f-80b0-76d4f7c8440c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.332663] env[62627]: DEBUG nova.compute.provider_tree [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 778.346582] env[62627]: DEBUG nova.compute.manager [req-99d26bd4-28b4-41c1-9110-c7e048205093 req-ff6441ef-045c-49c7-942e-a48ed0212daa service nova] [instance: f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7] Detach interface failed, port_id=6de87f0c-6ba0-47bb-8c42-aa150e7855fe, reason: Instance f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7 could not be found. 
{{(pid=62627) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 778.348272] env[62627]: DEBUG nova.scheduler.client.report [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 778.361075] env[62627]: DEBUG oslo_vmware.api [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d8bc94-c9bd-58e8-a46c-a94848a0b039, 'name': SearchDatastore_Task, 'duration_secs': 0.010774} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.361733] env[62627]: DEBUG oslo_concurrency.lockutils [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 778.362056] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 778.362317] env[62627]: DEBUG oslo_concurrency.lockutils [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 778.362467] env[62627]: DEBUG oslo_concurrency.lockutils [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 778.363387] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 778.363387] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6429aabf-ecca-43b5-8d77-b360f8fa209f {{(pid=62627) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.374857] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 778.374857] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] Folder [datastore2] devstack-image-cache_base created. {{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 778.379329] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4762f049-c410-41d1-bd05-22219e8162cc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.381520] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': task-2194083, 'name': ReconfigVM_Task, 'duration_secs': 0.89712} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.382648] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 15799515-ab7d-4e7c-a6e0-c740baf4f93a] Reconfigured VM instance instance-00000012 to attach disk [datastore1] 15799515-ab7d-4e7c-a6e0-c740baf4f93a/15799515-ab7d-4e7c-a6e0-c740baf4f93a.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 778.383579] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c1fdb62f-0599-431d-a0d8-e61c72c5bf90 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.386390] env[62627]: DEBUG oslo_vmware.api [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] Waiting for the task: (returnval){ [ 778.386390] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52561a88-984c-7b27-ac49-d1c198fe8392" [ 778.386390] env[62627]: _type = "Task" [ 778.386390] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.391837] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Waiting for the task: (returnval){ [ 778.391837] env[62627]: value = "task-2194086" [ 778.391837] env[62627]: _type = "Task" [ 778.391837] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.398311] env[62627]: DEBUG oslo_vmware.api [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52561a88-984c-7b27-ac49-d1c198fe8392, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.404374] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': task-2194086, 'name': Rename_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.458895] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Acquiring lock "refresh_cache-76fb58dc-df20-464b-bda0-dcf09d69c718" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 778.458895] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Acquired lock "refresh_cache-76fb58dc-df20-464b-bda0-dcf09d69c718" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 778.458895] env[62627]: DEBUG nova.network.neutron [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 76fb58dc-df20-464b-bda0-dcf09d69c718] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 778.462354] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': task-2194085, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.550393] env[62627]: INFO nova.compute.manager [-] [instance: f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7] Took 1.43 seconds to deallocate network for instance. [ 778.616022] env[62627]: INFO nova.compute.manager [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] Took 36.10 seconds to build instance. 
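The interleaved "Waiting for the task", "progress is N%" and "completed successfully" lines all come from the same pattern: the driver submits a vCenter task (CreateVM_Task, ReconfigVM_Task, Rename_Task, ExtendVirtualDisk_Task, ...) and then polls its TaskInfo until it reaches a terminal state. A rough, hypothetical sketch of that loop follows; the get_task_info helper and the 0.5 s interval are assumptions for illustration only, not the actual oslo.vmware implementation.

```python
# Minimal sketch of the poll-until-done loop behind the wait_for_task /
# _poll_task messages above. get_task_info() is a hypothetical stand-in for
# however the vCenter TaskInfo object is fetched; it is not a real
# oslo.vmware function.
import time

POLL_INTERVAL = 0.5  # seconds; the real interval is configurable


def wait_for_task(task_ref, get_task_info):
    while True:
        info = get_task_info(task_ref)      # TaskInfo-like object
        if info.state == "success":
            return info.result              # e.g. the created VM reference
        if info.state == "error":
            raise RuntimeError(info.error)  # surface the vCenter fault
        # 'queued' / 'running': report progress and poll again, mirroring
        # the "progress is N%" lines in this log.
        print(f"Task {task_ref}: {info.state}, progress {info.progress}%")
        time.sleep(POLL_INTERVAL)
```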
[ 778.663196] env[62627]: DEBUG nova.network.neutron [None req-fd2d46c3-107d-480c-9d9b-a5a305c61bfc tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Updating instance_info_cache with network_info: [{"id": "a7d230b4-8ae5-4c90-8ccf-806a4056e883", "address": "fa:16:3e:4c:84:35", "network": {"id": "bd876f46-2bdf-4196-b718-c36f7e15f150", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.153", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ccb0cafa1a904d578ec4998b386bebf7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae1d6a8-cbba-4bbf-af10-ba5467340475", "external-id": "nsx-vlan-transportzone-271", "segmentation_id": 271, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7d230b4-8a", "ovs_interfaceid": "a7d230b4-8ae5-4c90-8ccf-806a4056e883", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 778.856974] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.754s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 778.857525] env[62627]: DEBUG nova.compute.manager [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 778.863864] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.246s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 778.865797] env[62627]: INFO nova.compute.claims [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 778.900385] env[62627]: DEBUG oslo_vmware.api [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52561a88-984c-7b27-ac49-d1c198fe8392, 'name': SearchDatastore_Task, 'duration_secs': 0.011149} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.902702] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1d4ea517-7e08-4eb7-8218-e51939b6c703 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.906745] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': task-2194086, 'name': Rename_Task, 'duration_secs': 0.365679} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.907409] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 15799515-ab7d-4e7c-a6e0-c740baf4f93a] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 778.907656] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dd32cd2c-f0d2-48af-8274-24775238bc0d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.910676] env[62627]: DEBUG oslo_vmware.api [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] Waiting for the task: (returnval){ [ 778.910676] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]526a547d-71af-927d-fc68-d8daa4296ba7" [ 778.910676] env[62627]: _type = "Task" [ 778.910676] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.916901] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Waiting for the task: (returnval){ [ 778.916901] env[62627]: value = "task-2194087" [ 778.916901] env[62627]: _type = "Task" [ 778.916901] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.922913] env[62627]: DEBUG oslo_vmware.api [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]526a547d-71af-927d-fc68-d8daa4296ba7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.930181] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': task-2194087, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.962230] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': task-2194085, 'name': ReconfigVM_Task, 'duration_secs': 0.8706} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.962519] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: f09c685d-9103-4c94-a44a-266aacc334f3] Reconfigured VM instance instance-00000013 to attach disk [datastore1] f09c685d-9103-4c94-a44a-266aacc334f3/f09c685d-9103-4c94-a44a-266aacc334f3.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 778.964999] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8a4995df-474c-4d4f-b607-3493df96d4f3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.973545] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Waiting for the task: (returnval){ [ 778.973545] env[62627]: value = "task-2194088" [ 778.973545] env[62627]: _type = "Task" [ 778.973545] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.984279] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': task-2194088, 'name': Rename_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.008396] env[62627]: DEBUG nova.network.neutron [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 76fb58dc-df20-464b-bda0-dcf09d69c718] Instance cache missing network info. 
{{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 779.057620] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e33b7f68-a065-4361-afff-ba33ca167387 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 779.116997] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cc037c67-a9dd-4b70-b0b1-35065e686b5a tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Lock "e5712962-ad8e-4cf2-aa47-275293609a13" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.499s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 779.164626] env[62627]: DEBUG oslo_concurrency.lockutils [None req-fd2d46c3-107d-480c-9d9b-a5a305c61bfc tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Releasing lock "refresh_cache-ab999825-4310-4fd8-81f0-6bb5dde4175b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 779.164626] env[62627]: DEBUG nova.objects.instance [None req-fd2d46c3-107d-480c-9d9b-a5a305c61bfc tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Lazy-loading 'migration_context' on Instance uuid ab999825-4310-4fd8-81f0-6bb5dde4175b {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 779.199372] env[62627]: DEBUG nova.network.neutron [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 76fb58dc-df20-464b-bda0-dcf09d69c718] Updating instance_info_cache with network_info: [{"id": "ffc6e4e3-3dcd-43a4-8fa4-dc9c8ffa3e2b", "address": "fa:16:3e:ac:d8:ca", "network": {"id": "8b46c92c-9d4a-48df-bc33-b07572f6b585", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1386412703-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65a2f5e400f049089ba7a7d8a4a77e34", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35e463c7-7d78-4d66-8efd-6127b1f3ee17", "external-id": "nsx-vlan-transportzone-175", "segmentation_id": 175, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapffc6e4e3-3d", "ovs_interfaceid": "ffc6e4e3-3dcd-43a4-8fa4-dc9c8ffa3e2b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 779.372905] env[62627]: DEBUG nova.compute.utils [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Using /dev/sd instead of None 
{{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 779.377755] env[62627]: DEBUG nova.compute.manager [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Allocating IP information in the background. {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 779.377755] env[62627]: DEBUG nova.network.neutron [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 779.427752] env[62627]: DEBUG oslo_vmware.api [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]526a547d-71af-927d-fc68-d8daa4296ba7, 'name': SearchDatastore_Task, 'duration_secs': 0.011483} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.427752] env[62627]: DEBUG nova.policy [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fb8795655b7b41fabaaf2b49fe46e4d2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4edac85f0a824e4798b50cf27575dfdb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 779.432477] env[62627]: DEBUG oslo_concurrency.lockutils [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 779.433204] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 63e884c4-2cc1-431d-8a7f-1c4259b29e1a/63e884c4-2cc1-431d-8a7f-1c4259b29e1a.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 779.433912] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c2e46d82-c7ea-417c-b931-25b154c7322e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.444940] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': task-2194087, 'name': PowerOnVM_Task} progress 
is 64%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.446548] env[62627]: DEBUG oslo_vmware.api [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] Waiting for the task: (returnval){ [ 779.446548] env[62627]: value = "task-2194089" [ 779.446548] env[62627]: _type = "Task" [ 779.446548] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.460374] env[62627]: DEBUG oslo_vmware.api [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] Task: {'id': task-2194089, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.486986] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': task-2194088, 'name': Rename_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.621550] env[62627]: DEBUG nova.compute.manager [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: b1ba58c1-340f-486a-8f47-86fbc7b27485] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 779.667209] env[62627]: DEBUG nova.objects.base [None req-fd2d46c3-107d-480c-9d9b-a5a305c61bfc tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=62627) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 779.672857] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bde59a19-560f-4e1f-aeb9-f705128dafe8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.683255] env[62627]: DEBUG nova.compute.manager [req-7e2032b3-d44a-4754-a6be-3f6746a1642a req-140489a6-97e0-418d-a63e-a6b3b22a4eea service nova] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] Received event network-changed-87592606-744f-4632-a44b-69f4b8ef9acf {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 779.683255] env[62627]: DEBUG nova.compute.manager [req-7e2032b3-d44a-4754-a6be-3f6746a1642a req-140489a6-97e0-418d-a63e-a6b3b22a4eea service nova] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] Refreshing instance network info cache due to event network-changed-87592606-744f-4632-a44b-69f4b8ef9acf. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 779.683255] env[62627]: DEBUG oslo_concurrency.lockutils [req-7e2032b3-d44a-4754-a6be-3f6746a1642a req-140489a6-97e0-418d-a63e-a6b3b22a4eea service nova] Acquiring lock "refresh_cache-e5712962-ad8e-4cf2-aa47-275293609a13" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 779.683526] env[62627]: DEBUG oslo_concurrency.lockutils [req-7e2032b3-d44a-4754-a6be-3f6746a1642a req-140489a6-97e0-418d-a63e-a6b3b22a4eea service nova] Acquired lock "refresh_cache-e5712962-ad8e-4cf2-aa47-275293609a13" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 779.683858] env[62627]: DEBUG nova.network.neutron [req-7e2032b3-d44a-4754-a6be-3f6746a1642a req-140489a6-97e0-418d-a63e-a6b3b22a4eea service nova] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] Refreshing network info cache for port 87592606-744f-4632-a44b-69f4b8ef9acf {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 779.702851] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Releasing lock "refresh_cache-76fb58dc-df20-464b-bda0-dcf09d69c718" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 779.704420] env[62627]: DEBUG nova.compute.manager [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 76fb58dc-df20-464b-bda0-dcf09d69c718] Instance network_info: |[{"id": "ffc6e4e3-3dcd-43a4-8fa4-dc9c8ffa3e2b", "address": "fa:16:3e:ac:d8:ca", "network": {"id": "8b46c92c-9d4a-48df-bc33-b07572f6b585", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1386412703-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65a2f5e400f049089ba7a7d8a4a77e34", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35e463c7-7d78-4d66-8efd-6127b1f3ee17", "external-id": "nsx-vlan-transportzone-175", "segmentation_id": 175, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapffc6e4e3-3d", "ovs_interfaceid": "ffc6e4e3-3dcd-43a4-8fa4-dc9c8ffa3e2b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 779.707579] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 76fb58dc-df20-464b-bda0-dcf09d69c718] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ac:d8:ca', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '35e463c7-7d78-4d66-8efd-6127b1f3ee17', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 
'iface_id': 'ffc6e4e3-3dcd-43a4-8fa4-dc9c8ffa3e2b', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 779.716429] env[62627]: DEBUG oslo.service.loopingcall [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 779.717335] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3398b7bd-31c2-4ae6-9801-4de4d2a7707d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.720968] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 76fb58dc-df20-464b-bda0-dcf09d69c718] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 779.725172] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c86b589d-883f-4f6f-8322-13d749c692f0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.745127] env[62627]: DEBUG oslo_vmware.api [None req-fd2d46c3-107d-480c-9d9b-a5a305c61bfc tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Waiting for the task: (returnval){ [ 779.745127] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5263c8c1-e0a9-4ae6-6369-a16cf5fcc0a3" [ 779.745127] env[62627]: _type = "Task" [ 779.745127] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.750493] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 779.750493] env[62627]: value = "task-2194090" [ 779.750493] env[62627]: _type = "Task" [ 779.750493] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.765226] env[62627]: DEBUG oslo_vmware.api [None req-fd2d46c3-107d-480c-9d9b-a5a305c61bfc tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5263c8c1-e0a9-4ae6-6369-a16cf5fcc0a3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.769672] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194090, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.883952] env[62627]: DEBUG nova.compute.manager [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Start building block device mappings for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 779.933214] env[62627]: DEBUG nova.network.neutron [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Successfully created port: 4cb3b0a3-c2f5-4b36-a75a-b6d3b5e59dc3 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 779.943241] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': task-2194087, 'name': PowerOnVM_Task, 'duration_secs': 0.987795} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.946043] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 15799515-ab7d-4e7c-a6e0-c740baf4f93a] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 779.946260] env[62627]: INFO nova.compute.manager [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 15799515-ab7d-4e7c-a6e0-c740baf4f93a] Took 11.10 seconds to spawn the instance on the hypervisor. [ 779.946508] env[62627]: DEBUG nova.compute.manager [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 15799515-ab7d-4e7c-a6e0-c740baf4f93a] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 779.948053] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e5cac47-3d65-4f43-82c0-83f932562e26 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.968376] env[62627]: DEBUG oslo_vmware.api [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] Task: {'id': task-2194089, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.990848] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': task-2194088, 'name': Rename_Task, 'duration_secs': 0.965459} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.991240] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: f09c685d-9103-4c94-a44a-266aacc334f3] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 779.993733] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-64323fb7-7692-4caa-9656-a9fa8818135d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.003151] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Waiting for the task: (returnval){ [ 780.003151] env[62627]: value = "task-2194091" [ 780.003151] env[62627]: _type = "Task" [ 780.003151] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.007667] env[62627]: DEBUG nova.compute.manager [req-341ddebc-57bd-42f3-b25d-93f30bcfa7fa req-dea51b60-b629-46b4-834a-91336317968d service nova] [instance: 76fb58dc-df20-464b-bda0-dcf09d69c718] Received event network-vif-plugged-ffc6e4e3-3dcd-43a4-8fa4-dc9c8ffa3e2b {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 780.008122] env[62627]: DEBUG oslo_concurrency.lockutils [req-341ddebc-57bd-42f3-b25d-93f30bcfa7fa req-dea51b60-b629-46b4-834a-91336317968d service nova] Acquiring lock "76fb58dc-df20-464b-bda0-dcf09d69c718-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 780.008122] env[62627]: DEBUG oslo_concurrency.lockutils [req-341ddebc-57bd-42f3-b25d-93f30bcfa7fa req-dea51b60-b629-46b4-834a-91336317968d service nova] Lock "76fb58dc-df20-464b-bda0-dcf09d69c718-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 780.008291] env[62627]: DEBUG oslo_concurrency.lockutils [req-341ddebc-57bd-42f3-b25d-93f30bcfa7fa req-dea51b60-b629-46b4-834a-91336317968d service nova] Lock "76fb58dc-df20-464b-bda0-dcf09d69c718-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 780.008496] env[62627]: DEBUG nova.compute.manager [req-341ddebc-57bd-42f3-b25d-93f30bcfa7fa req-dea51b60-b629-46b4-834a-91336317968d service nova] [instance: 76fb58dc-df20-464b-bda0-dcf09d69c718] No waiting events found dispatching network-vif-plugged-ffc6e4e3-3dcd-43a4-8fa4-dc9c8ffa3e2b {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 780.008719] env[62627]: WARNING nova.compute.manager [req-341ddebc-57bd-42f3-b25d-93f30bcfa7fa req-dea51b60-b629-46b4-834a-91336317968d service nova] [instance: 76fb58dc-df20-464b-bda0-dcf09d69c718] Received unexpected event network-vif-plugged-ffc6e4e3-3dcd-43a4-8fa4-dc9c8ffa3e2b for instance with vm_state building and task_state spawning. 
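The "Acquiring lock ... / acquired ... waited 0.000s / ... released ... held 0.000s" triples above, with lock names like "76fb58dc-df20-464b-bda0-dcf09d69c718-events" acquired "by nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event", are emitted by oslo.concurrency's lockutils when DEBUG logging is on. A minimal sketch of that pattern, assuming only the public lockutils.synchronized decorator; the function name and the pending_events structure are illustrative placeholders, not Nova's actual code:

from oslo_concurrency import lockutils

def pop_instance_event(instance_uuid, event_name, pending_events):
    # The "<uuid>-events" lock names and the "acquired by ... _pop_event"
    # wording in the log correspond to a synchronized inner function like
    # this one: the lock serializes access to the per-instance event table
    # for the duration of the pop.
    @lockutils.synchronized('%s-events' % instance_uuid)
    def _pop_event():
        return pending_events.get(instance_uuid, {}).pop(event_name, None)

    return _pop_event()

The acquire/wait/held timings in the log come from lockutils itself (lockutils.py:402/407/421), so a held time of 0.000s simply means the critical section finished within the logging resolution.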
[ 780.008948] env[62627]: DEBUG nova.compute.manager [req-341ddebc-57bd-42f3-b25d-93f30bcfa7fa req-dea51b60-b629-46b4-834a-91336317968d service nova] [instance: 76fb58dc-df20-464b-bda0-dcf09d69c718] Received event network-changed-ffc6e4e3-3dcd-43a4-8fa4-dc9c8ffa3e2b {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 780.009183] env[62627]: DEBUG nova.compute.manager [req-341ddebc-57bd-42f3-b25d-93f30bcfa7fa req-dea51b60-b629-46b4-834a-91336317968d service nova] [instance: 76fb58dc-df20-464b-bda0-dcf09d69c718] Refreshing instance network info cache due to event network-changed-ffc6e4e3-3dcd-43a4-8fa4-dc9c8ffa3e2b. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 780.009379] env[62627]: DEBUG oslo_concurrency.lockutils [req-341ddebc-57bd-42f3-b25d-93f30bcfa7fa req-dea51b60-b629-46b4-834a-91336317968d service nova] Acquiring lock "refresh_cache-76fb58dc-df20-464b-bda0-dcf09d69c718" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 780.009516] env[62627]: DEBUG oslo_concurrency.lockutils [req-341ddebc-57bd-42f3-b25d-93f30bcfa7fa req-dea51b60-b629-46b4-834a-91336317968d service nova] Acquired lock "refresh_cache-76fb58dc-df20-464b-bda0-dcf09d69c718" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 780.009670] env[62627]: DEBUG nova.network.neutron [req-341ddebc-57bd-42f3-b25d-93f30bcfa7fa req-dea51b60-b629-46b4-834a-91336317968d service nova] [instance: 76fb58dc-df20-464b-bda0-dcf09d69c718] Refreshing network info cache for port ffc6e4e3-3dcd-43a4-8fa4-dc9c8ffa3e2b {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 780.029307] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': task-2194091, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.145117] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 780.248481] env[62627]: DEBUG nova.network.neutron [req-7e2032b3-d44a-4754-a6be-3f6746a1642a req-140489a6-97e0-418d-a63e-a6b3b22a4eea service nova] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] Updated VIF entry in instance network info cache for port 87592606-744f-4632-a44b-69f4b8ef9acf. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 780.248866] env[62627]: DEBUG nova.network.neutron [req-7e2032b3-d44a-4754-a6be-3f6746a1642a req-140489a6-97e0-418d-a63e-a6b3b22a4eea service nova] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] Updating instance_info_cache with network_info: [{"id": "87592606-744f-4632-a44b-69f4b8ef9acf", "address": "fa:16:3e:79:cb:ab", "network": {"id": "b9125493-d85a-4d39-9114-1d2259e90e38", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1324862174-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.227", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f831870fb0824e8e8353640736a11692", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "411f389f-4e4f-4450-891e-38944cac6135", "external-id": "nsx-vlan-transportzone-795", "segmentation_id": 795, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87592606-74", "ovs_interfaceid": "87592606-744f-4632-a44b-69f4b8ef9acf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 780.267383] env[62627]: DEBUG oslo_vmware.api [None req-fd2d46c3-107d-480c-9d9b-a5a305c61bfc tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5263c8c1-e0a9-4ae6-6369-a16cf5fcc0a3, 'name': SearchDatastore_Task, 'duration_secs': 0.021187} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.268175] env[62627]: DEBUG oslo_concurrency.lockutils [None req-fd2d46c3-107d-480c-9d9b-a5a305c61bfc tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 780.274445] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194090, 'name': CreateVM_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.464031] env[62627]: DEBUG oslo_vmware.api [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] Task: {'id': task-2194089, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.561603} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.466715] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 63e884c4-2cc1-431d-8a7f-1c4259b29e1a/63e884c4-2cc1-431d-8a7f-1c4259b29e1a.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 780.466912] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 780.468550] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-56166cfe-2fa9-482a-9438-86991d7d4dc3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.475822] env[62627]: DEBUG oslo_vmware.api [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] Waiting for the task: (returnval){ [ 780.475822] env[62627]: value = "task-2194092" [ 780.475822] env[62627]: _type = "Task" [ 780.475822] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.493985] env[62627]: DEBUG oslo_vmware.api [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] Task: {'id': task-2194092, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.500117] env[62627]: INFO nova.compute.manager [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 15799515-ab7d-4e7c-a6e0-c740baf4f93a] Took 34.70 seconds to build instance. [ 780.515849] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': task-2194091, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.579404] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5600cfec-44a5-4238-99e2-2d31db73a487 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.591229] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85dd8789-5d3e-455c-b605-cf1e0814718f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.646056] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a9166c1-7708-40d8-b936-82ff08ae686d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.658502] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aa01c8b-b29d-404b-98f4-7e6000d47392 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.691679] env[62627]: DEBUG nova.compute.provider_tree [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Updating inventory in ProviderTree for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 780.758620] env[62627]: DEBUG oslo_concurrency.lockutils [req-7e2032b3-d44a-4754-a6be-3f6746a1642a req-140489a6-97e0-418d-a63e-a6b3b22a4eea service nova] Releasing lock "refresh_cache-e5712962-ad8e-4cf2-aa47-275293609a13" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 780.768037] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194090, 'name': CreateVM_Task, 'duration_secs': 0.534048} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.768211] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 76fb58dc-df20-464b-bda0-dcf09d69c718] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 780.769023] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 780.769023] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 780.769312] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 780.769555] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3837e7fe-3982-4167-86f8-9fc6176990a5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.776165] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Waiting for the task: (returnval){ [ 780.776165] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52ec6591-8f36-d747-a8d6-6c740630aff7" [ 780.776165] env[62627]: _type = "Task" [ 780.776165] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.784432] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52ec6591-8f36-d747-a8d6-6c740630aff7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.859795] env[62627]: DEBUG nova.network.neutron [req-341ddebc-57bd-42f3-b25d-93f30bcfa7fa req-dea51b60-b629-46b4-834a-91336317968d service nova] [instance: 76fb58dc-df20-464b-bda0-dcf09d69c718] Updated VIF entry in instance network info cache for port ffc6e4e3-3dcd-43a4-8fa4-dc9c8ffa3e2b. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 780.860183] env[62627]: DEBUG nova.network.neutron [req-341ddebc-57bd-42f3-b25d-93f30bcfa7fa req-dea51b60-b629-46b4-834a-91336317968d service nova] [instance: 76fb58dc-df20-464b-bda0-dcf09d69c718] Updating instance_info_cache with network_info: [{"id": "ffc6e4e3-3dcd-43a4-8fa4-dc9c8ffa3e2b", "address": "fa:16:3e:ac:d8:ca", "network": {"id": "8b46c92c-9d4a-48df-bc33-b07572f6b585", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1386412703-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65a2f5e400f049089ba7a7d8a4a77e34", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35e463c7-7d78-4d66-8efd-6127b1f3ee17", "external-id": "nsx-vlan-transportzone-175", "segmentation_id": 175, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapffc6e4e3-3d", "ovs_interfaceid": "ffc6e4e3-3dcd-43a4-8fa4-dc9c8ffa3e2b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 780.899324] env[62627]: DEBUG nova.compute.manager [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Start spawning the instance on the hypervisor. 
{{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 780.927380] env[62627]: DEBUG nova.virt.hardware [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 780.928126] env[62627]: DEBUG nova.virt.hardware [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 780.928126] env[62627]: DEBUG nova.virt.hardware [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 780.928126] env[62627]: DEBUG nova.virt.hardware [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 780.928424] env[62627]: DEBUG nova.virt.hardware [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 780.928767] env[62627]: DEBUG nova.virt.hardware [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 780.929057] env[62627]: DEBUG nova.virt.hardware [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 780.929261] env[62627]: DEBUG nova.virt.hardware [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 780.929517] env[62627]: DEBUG nova.virt.hardware [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 780.929786] env[62627]: DEBUG nova.virt.hardware [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 780.930066] env[62627]: DEBUG nova.virt.hardware [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 780.931117] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75d687c3-e439-4cbe-bb72-b8e751614ad5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.940502] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-945638a9-ece2-436f-9ed2-eaff069830e1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.985868] env[62627]: DEBUG oslo_vmware.api [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] Task: {'id': task-2194092, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075749} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.986179] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 780.986947] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-226eab97-f3ac-468e-b5ae-d7989d4aea56 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.006846] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Reconfiguring VM instance instance-0000000d to attach disk [datastore2] 63e884c4-2cc1-431d-8a7f-1c4259b29e1a/63e884c4-2cc1-431d-8a7f-1c4259b29e1a.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 781.007344] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Lock "15799515-ab7d-4e7c-a6e0-c740baf4f93a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 43.738s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 781.010748] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fad14dec-6f6d-43f7-84aa-f2a50dc17d9e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.032076] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': task-2194091, 'name': PowerOnVM_Task, 'duration_secs': 0.607976} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.033463] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: f09c685d-9103-4c94-a44a-266aacc334f3] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 781.033681] env[62627]: INFO nova.compute.manager [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: f09c685d-9103-4c94-a44a-266aacc334f3] Took 9.36 seconds to spawn the instance on the hypervisor. 
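The PowerOnVM_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task and ReconfigVM_Task entries above all follow the same shape: the oslo.vmware session invokes a long-running vCenter task and then polls it until completion, which is what produces the "_poll_task ... progress is N%" lines followed by "completed successfully" with a duration_secs value. A minimal sketch of that pattern, assuming an already-constructed oslo_vmware.api.VMwareAPISession is available as `session`; the function name and the vm_ref argument are placeholders:

def power_on_and_wait(session, vm_ref):
    # Start the asynchronous task on vCenter (the "Invoking
    # VirtualMachine.PowerOnVM_Task" entries above); invoke_api() issues the
    # SOAP call and returns a task reference immediately.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # wait_for_task() polls the task state at the session's poll interval,
    # producing the "progress is N% ... completed successfully" entries, and
    # raises an oslo_vmware exception if the task ends in an error state.
    return session.wait_for_task(task)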
[ 781.033858] env[62627]: DEBUG nova.compute.manager [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: f09c685d-9103-4c94-a44a-266aacc334f3] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 781.034233] env[62627]: DEBUG oslo_vmware.api [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] Waiting for the task: (returnval){ [ 781.034233] env[62627]: value = "task-2194093" [ 781.034233] env[62627]: _type = "Task" [ 781.034233] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.035509] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-021ffae7-9401-441f-8cc5-928ce739426d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.051542] env[62627]: DEBUG oslo_vmware.api [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] Task: {'id': task-2194093, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.226074] env[62627]: DEBUG nova.scheduler.client.report [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Updated inventory for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with generation 50 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 781.226345] env[62627]: DEBUG nova.compute.provider_tree [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Updating resource provider c17e7f32-68d8-4e2f-8979-15d701de02f7 generation from 50 to 51 during operation: update_inventory {{(pid=62627) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 781.226526] env[62627]: DEBUG nova.compute.provider_tree [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Updating inventory in ProviderTree for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 781.286334] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': 
session[520bf719-dc86-5bda-f3a2-48a743b520b7]52ec6591-8f36-d747-a8d6-6c740630aff7, 'name': SearchDatastore_Task, 'duration_secs': 0.027001} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.286663] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 781.286896] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 76fb58dc-df20-464b-bda0-dcf09d69c718] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 781.287550] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 781.287550] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 781.287550] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 781.287705] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7461b779-d628-4dbf-83ad-d5ac596df1e7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.299987] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 781.300112] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 781.300770] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f79033d4-8e75-4e5a-a4f2-12a1135c3245 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.307501] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Waiting for the task: (returnval){ [ 781.307501] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]529da61c-9cce-5043-ec60-7ff47d97c124" [ 781.307501] env[62627]: _type = "Task" [ 781.307501] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.315670] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]529da61c-9cce-5043-ec60-7ff47d97c124, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.363527] env[62627]: DEBUG oslo_concurrency.lockutils [req-341ddebc-57bd-42f3-b25d-93f30bcfa7fa req-dea51b60-b629-46b4-834a-91336317968d service nova] Releasing lock "refresh_cache-76fb58dc-df20-464b-bda0-dcf09d69c718" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 781.527083] env[62627]: DEBUG nova.compute.manager [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 781.551145] env[62627]: DEBUG oslo_vmware.api [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] Task: {'id': task-2194093, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.565109] env[62627]: INFO nova.compute.manager [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: f09c685d-9103-4c94-a44a-266aacc334f3] Took 35.67 seconds to build instance. [ 781.734473] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.868s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 781.734473] env[62627]: DEBUG nova.compute.manager [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] Start building networks asynchronously for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 781.738898] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.112s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 781.739528] env[62627]: INFO nova.compute.claims [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 781.797140] env[62627]: DEBUG nova.network.neutron [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Successfully updated port: 4cb3b0a3-c2f5-4b36-a75a-b6d3b5e59dc3 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 781.823385] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]529da61c-9cce-5043-ec60-7ff47d97c124, 'name': SearchDatastore_Task, 'duration_secs': 0.04848} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.823890] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-438213f6-191c-4482-92f3-cd2e554a2ee0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.831860] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Waiting for the task: (returnval){ [ 781.831860] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]522b9f97-606c-06ec-9ef0-1a0236758c1a" [ 781.831860] env[62627]: _type = "Task" [ 781.831860] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.845364] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]522b9f97-606c-06ec-9ef0-1a0236758c1a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.050025] env[62627]: DEBUG oslo_vmware.api [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] Task: {'id': task-2194093, 'name': ReconfigVM_Task, 'duration_secs': 0.642931} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.051106] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Reconfigured VM instance instance-0000000d to attach disk [datastore2] 63e884c4-2cc1-431d-8a7f-1c4259b29e1a/63e884c4-2cc1-431d-8a7f-1c4259b29e1a.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 782.051106] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ae4d0fa0-453f-425a-b45d-fc25a1595060 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.053876] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 782.059546] env[62627]: DEBUG oslo_vmware.api [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] Waiting for the task: (returnval){ [ 782.059546] env[62627]: value = "task-2194094" [ 782.059546] env[62627]: _type = "Task" [ 782.059546] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.065522] env[62627]: DEBUG nova.compute.manager [req-159e1116-cf94-4b27-b98e-d6c48a6a6704 req-02f050d0-66ed-415d-ab1f-3384de01f293 service nova] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Received event network-vif-plugged-4cb3b0a3-c2f5-4b36-a75a-b6d3b5e59dc3 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 782.065522] env[62627]: DEBUG oslo_concurrency.lockutils [req-159e1116-cf94-4b27-b98e-d6c48a6a6704 req-02f050d0-66ed-415d-ab1f-3384de01f293 service nova] Acquiring lock "fffa23fe-3ce2-4879-b72d-11275d298504-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 782.065522] env[62627]: DEBUG oslo_concurrency.lockutils [req-159e1116-cf94-4b27-b98e-d6c48a6a6704 req-02f050d0-66ed-415d-ab1f-3384de01f293 service nova] Lock "fffa23fe-3ce2-4879-b72d-11275d298504-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 782.065522] env[62627]: DEBUG oslo_concurrency.lockutils [req-159e1116-cf94-4b27-b98e-d6c48a6a6704 req-02f050d0-66ed-415d-ab1f-3384de01f293 service nova] Lock "fffa23fe-3ce2-4879-b72d-11275d298504-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 782.065522] env[62627]: DEBUG nova.compute.manager [req-159e1116-cf94-4b27-b98e-d6c48a6a6704 req-02f050d0-66ed-415d-ab1f-3384de01f293 service nova] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] No waiting events found 
dispatching network-vif-plugged-4cb3b0a3-c2f5-4b36-a75a-b6d3b5e59dc3 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 782.065847] env[62627]: WARNING nova.compute.manager [req-159e1116-cf94-4b27-b98e-d6c48a6a6704 req-02f050d0-66ed-415d-ab1f-3384de01f293 service nova] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Received unexpected event network-vif-plugged-4cb3b0a3-c2f5-4b36-a75a-b6d3b5e59dc3 for instance with vm_state building and task_state spawning. [ 782.065991] env[62627]: DEBUG nova.compute.manager [req-159e1116-cf94-4b27-b98e-d6c48a6a6704 req-02f050d0-66ed-415d-ab1f-3384de01f293 service nova] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Received event network-changed-4cb3b0a3-c2f5-4b36-a75a-b6d3b5e59dc3 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 782.066233] env[62627]: DEBUG nova.compute.manager [req-159e1116-cf94-4b27-b98e-d6c48a6a6704 req-02f050d0-66ed-415d-ab1f-3384de01f293 service nova] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Refreshing instance network info cache due to event network-changed-4cb3b0a3-c2f5-4b36-a75a-b6d3b5e59dc3. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 782.066472] env[62627]: DEBUG oslo_concurrency.lockutils [req-159e1116-cf94-4b27-b98e-d6c48a6a6704 req-02f050d0-66ed-415d-ab1f-3384de01f293 service nova] Acquiring lock "refresh_cache-fffa23fe-3ce2-4879-b72d-11275d298504" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 782.066629] env[62627]: DEBUG oslo_concurrency.lockutils [req-159e1116-cf94-4b27-b98e-d6c48a6a6704 req-02f050d0-66ed-415d-ab1f-3384de01f293 service nova] Acquired lock "refresh_cache-fffa23fe-3ce2-4879-b72d-11275d298504" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 782.066819] env[62627]: DEBUG nova.network.neutron [req-159e1116-cf94-4b27-b98e-d6c48a6a6704 req-02f050d0-66ed-415d-ab1f-3384de01f293 service nova] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Refreshing network info cache for port 4cb3b0a3-c2f5-4b36-a75a-b6d3b5e59dc3 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 782.069027] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Lock "f09c685d-9103-4c94-a44a-266aacc334f3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.746s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 782.074194] env[62627]: DEBUG oslo_vmware.api [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] Task: {'id': task-2194094, 'name': Rename_Task} progress is 5%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.244096] env[62627]: DEBUG nova.compute.utils [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 782.247261] env[62627]: DEBUG nova.compute.manager [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] Allocating IP information in the background. {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 782.247458] env[62627]: DEBUG nova.network.neutron [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 782.300790] env[62627]: DEBUG nova.policy [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8d6a2c615f034c49836031736531061c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '01b0f3fe63f64e1183acc0fec83bb3f5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 782.302722] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Acquiring lock "refresh_cache-fffa23fe-3ce2-4879-b72d-11275d298504" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 782.342911] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]522b9f97-606c-06ec-9ef0-1a0236758c1a, 'name': SearchDatastore_Task, 'duration_secs': 0.012003} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.343316] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 782.343666] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 76fb58dc-df20-464b-bda0-dcf09d69c718/76fb58dc-df20-464b-bda0-dcf09d69c718.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 782.343999] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dceac369-1b68-4ef8-984e-0c11f75bac7e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.353539] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Waiting for the task: (returnval){ [ 782.353539] env[62627]: value = "task-2194095" [ 782.353539] env[62627]: _type = "Task" [ 782.353539] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.363658] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': task-2194095, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.574993] env[62627]: DEBUG nova.compute.manager [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: ea289de9-9b2d-4756-a9a5-0e3dfe8db772] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 782.579403] env[62627]: DEBUG oslo_vmware.api [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] Task: {'id': task-2194094, 'name': Rename_Task, 'duration_secs': 0.15694} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.579403] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 782.579648] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-843c8a57-15c5-4f02-9ab5-9f1b1fafb134 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.590364] env[62627]: DEBUG oslo_vmware.api [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] Waiting for the task: (returnval){ [ 782.590364] env[62627]: value = "task-2194096" [ 782.590364] env[62627]: _type = "Task" [ 782.590364] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.601372] env[62627]: DEBUG oslo_vmware.api [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] Task: {'id': task-2194096, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.634303] env[62627]: DEBUG nova.network.neutron [req-159e1116-cf94-4b27-b98e-d6c48a6a6704 req-02f050d0-66ed-415d-ab1f-3384de01f293 service nova] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 782.754736] env[62627]: DEBUG nova.compute.manager [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 782.821312] env[62627]: DEBUG nova.network.neutron [req-159e1116-cf94-4b27-b98e-d6c48a6a6704 req-02f050d0-66ed-415d-ab1f-3384de01f293 service nova] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 782.866290] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': task-2194095, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.475165} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.867625] env[62627]: DEBUG nova.network.neutron [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] Successfully created port: 33dc8116-8cea-41e8-8ac1-a7f42c7bca81 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 782.872305] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 76fb58dc-df20-464b-bda0-dcf09d69c718/76fb58dc-df20-464b-bda0-dcf09d69c718.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 782.872667] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 76fb58dc-df20-464b-bda0-dcf09d69c718] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 782.873390] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3d102262-96d6-41ba-a556-ae336824c57d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.884924] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Waiting for the task: (returnval){ [ 782.884924] env[62627]: value = "task-2194097" [ 782.884924] env[62627]: _type = "Task" [ 782.884924] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.897937] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': task-2194097, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.106408] env[62627]: DEBUG oslo_vmware.api [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] Task: {'id': task-2194096, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.107638] env[62627]: DEBUG oslo_concurrency.lockutils [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 783.324215] env[62627]: DEBUG oslo_concurrency.lockutils [req-159e1116-cf94-4b27-b98e-d6c48a6a6704 req-02f050d0-66ed-415d-ab1f-3384de01f293 service nova] Releasing lock "refresh_cache-fffa23fe-3ce2-4879-b72d-11275d298504" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 783.325596] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Acquired lock "refresh_cache-fffa23fe-3ce2-4879-b72d-11275d298504" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 783.325596] env[62627]: DEBUG nova.network.neutron [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 783.404364] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': task-2194097, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.114904} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.404364] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 76fb58dc-df20-464b-bda0-dcf09d69c718] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 783.405092] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbf173d8-9ca0-46a6-8909-60a419090e23 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.410132] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16422309-c907-45c7-903a-fbf8228c8e63 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.431594] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 76fb58dc-df20-464b-bda0-dcf09d69c718] Reconfiguring VM instance instance-00000014 to attach disk [datastore2] 76fb58dc-df20-464b-bda0-dcf09d69c718/76fb58dc-df20-464b-bda0-dcf09d69c718.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 783.432425] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-32c1a6e3-216e-49ae-a50a-eb871a4d4f2a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.454362] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c89f7d0-5433-41a3-a6f2-d9ec3294abb3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.460709] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Waiting for the task: (returnval){ [ 783.460709] env[62627]: value = "task-2194098" [ 783.460709] env[62627]: _type = "Task" [ 783.460709] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.489859] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afba3a9f-1877-4921-9d6d-24b0da1ed915 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.505851] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': task-2194098, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.508022] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c970f432-9d9e-4807-a6c5-55c0c71dc834 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.533983] env[62627]: DEBUG nova.compute.provider_tree [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 783.603251] env[62627]: DEBUG oslo_vmware.api [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] Task: {'id': task-2194096, 'name': PowerOnVM_Task, 'duration_secs': 0.65705} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.603564] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 783.603766] env[62627]: DEBUG nova.compute.manager [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 783.604590] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79a7e0f6-8a22-45a7-8dda-e24929c67969 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.767445] env[62627]: DEBUG nova.compute.manager [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] Start spawning the instance on the hypervisor. 
{{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 783.801422] env[62627]: DEBUG nova.virt.hardware [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 783.801422] env[62627]: DEBUG nova.virt.hardware [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 783.801422] env[62627]: DEBUG nova.virt.hardware [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 783.801422] env[62627]: DEBUG nova.virt.hardware [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 783.801747] env[62627]: DEBUG nova.virt.hardware [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 783.802890] env[62627]: DEBUG nova.virt.hardware [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 783.802890] env[62627]: DEBUG nova.virt.hardware [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 783.802890] env[62627]: DEBUG nova.virt.hardware [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 783.802890] env[62627]: DEBUG nova.virt.hardware [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Got 1 possible 
topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 783.802890] env[62627]: DEBUG nova.virt.hardware [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 783.803187] env[62627]: DEBUG nova.virt.hardware [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 783.803945] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b7e5fb8-664d-49ad-b676-293162a95f25 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.815451] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8bfa8d8-de29-4977-8359-453172548019 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.872081] env[62627]: DEBUG nova.network.neutron [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 783.998196] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': task-2194098, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.038346] env[62627]: DEBUG nova.scheduler.client.report [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 784.045157] env[62627]: DEBUG nova.network.neutron [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Updating instance_info_cache with network_info: [{"id": "4cb3b0a3-c2f5-4b36-a75a-b6d3b5e59dc3", "address": "fa:16:3e:e4:4d:b6", "network": {"id": "13960831-02bf-4e73-b792-efe56ca391d7", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-864734567-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "4edac85f0a824e4798b50cf27575dfdb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee20e439-fed9-490e-97dd-f3c886977ae1", "external-id": "nsx-vlan-transportzone-357", "segmentation_id": 357, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4cb3b0a3-c2", "ovs_interfaceid": "4cb3b0a3-c2f5-4b36-a75a-b6d3b5e59dc3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 784.129402] env[62627]: DEBUG oslo_concurrency.lockutils [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 784.366766] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ddd0248c-dc8d-4154-a2a1-aa2a8e3b3e61 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Acquiring lock "63e884c4-2cc1-431d-8a7f-1c4259b29e1a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 784.366766] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ddd0248c-dc8d-4154-a2a1-aa2a8e3b3e61 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Lock "63e884c4-2cc1-431d-8a7f-1c4259b29e1a" acquired by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 784.366766] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ddd0248c-dc8d-4154-a2a1-aa2a8e3b3e61 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Acquiring lock "63e884c4-2cc1-431d-8a7f-1c4259b29e1a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 784.366766] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ddd0248c-dc8d-4154-a2a1-aa2a8e3b3e61 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Lock "63e884c4-2cc1-431d-8a7f-1c4259b29e1a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 784.367392] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ddd0248c-dc8d-4154-a2a1-aa2a8e3b3e61 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Lock "63e884c4-2cc1-431d-8a7f-1c4259b29e1a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 784.370289] env[62627]: INFO nova.compute.manager [None req-ddd0248c-dc8d-4154-a2a1-aa2a8e3b3e61 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Terminating instance [ 784.426810] env[62627]: DEBUG nova.compute.manager [req-531e8511-bf73-46dd-bac8-a9f9eafe1a94 req-0dea82c1-21dc-4d2b-a572-c966104fa4d7 service nova] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] Received event network-vif-plugged-33dc8116-8cea-41e8-8ac1-a7f42c7bca81 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 784.427265] env[62627]: DEBUG oslo_concurrency.lockutils [req-531e8511-bf73-46dd-bac8-a9f9eafe1a94 req-0dea82c1-21dc-4d2b-a572-c966104fa4d7 service nova] Acquiring lock "83998771-97a7-4458-834e-df1e4b2914ee-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 784.428542] env[62627]: DEBUG oslo_concurrency.lockutils [req-531e8511-bf73-46dd-bac8-a9f9eafe1a94 req-0dea82c1-21dc-4d2b-a572-c966104fa4d7 service nova] Lock "83998771-97a7-4458-834e-df1e4b2914ee-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 784.428542] env[62627]: DEBUG oslo_concurrency.lockutils [req-531e8511-bf73-46dd-bac8-a9f9eafe1a94 req-0dea82c1-21dc-4d2b-a572-c966104fa4d7 service nova] Lock "83998771-97a7-4458-834e-df1e4b2914ee-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 784.428542] env[62627]: DEBUG nova.compute.manager [req-531e8511-bf73-46dd-bac8-a9f9eafe1a94 req-0dea82c1-21dc-4d2b-a572-c966104fa4d7 
service nova] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] No waiting events found dispatching network-vif-plugged-33dc8116-8cea-41e8-8ac1-a7f42c7bca81 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 784.428542] env[62627]: WARNING nova.compute.manager [req-531e8511-bf73-46dd-bac8-a9f9eafe1a94 req-0dea82c1-21dc-4d2b-a572-c966104fa4d7 service nova] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] Received unexpected event network-vif-plugged-33dc8116-8cea-41e8-8ac1-a7f42c7bca81 for instance with vm_state building and task_state spawning. [ 784.497979] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': task-2194098, 'name': ReconfigVM_Task, 'duration_secs': 0.950007} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.499191] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 76fb58dc-df20-464b-bda0-dcf09d69c718] Reconfigured VM instance instance-00000014 to attach disk [datastore2] 76fb58dc-df20-464b-bda0-dcf09d69c718/76fb58dc-df20-464b-bda0-dcf09d69c718.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 784.499191] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ed331304-301a-491b-b378-dfc4f5c20ae5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.507269] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Waiting for the task: (returnval){ [ 784.507269] env[62627]: value = "task-2194099" [ 784.507269] env[62627]: _type = "Task" [ 784.507269] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.519134] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': task-2194099, 'name': Rename_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.529059] env[62627]: DEBUG nova.network.neutron [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] Successfully updated port: 33dc8116-8cea-41e8-8ac1-a7f42c7bca81 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 784.543038] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.805s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 784.543618] env[62627]: DEBUG nova.compute.manager [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 784.547732] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.101s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 784.549908] env[62627]: INFO nova.compute.claims [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 784.554610] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Releasing lock "refresh_cache-fffa23fe-3ce2-4879-b72d-11275d298504" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 784.554848] env[62627]: DEBUG nova.compute.manager [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Instance network_info: |[{"id": "4cb3b0a3-c2f5-4b36-a75a-b6d3b5e59dc3", "address": "fa:16:3e:e4:4d:b6", "network": {"id": "13960831-02bf-4e73-b792-efe56ca391d7", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-864734567-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "4edac85f0a824e4798b50cf27575dfdb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"ee20e439-fed9-490e-97dd-f3c886977ae1", "external-id": "nsx-vlan-transportzone-357", "segmentation_id": 357, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4cb3b0a3-c2", "ovs_interfaceid": "4cb3b0a3-c2f5-4b36-a75a-b6d3b5e59dc3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 784.555658] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e4:4d:b6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ee20e439-fed9-490e-97dd-f3c886977ae1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4cb3b0a3-c2f5-4b36-a75a-b6d3b5e59dc3', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 784.564765] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Creating folder: Project (4edac85f0a824e4798b50cf27575dfdb). Parent ref: group-v447541. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 784.565509] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e3c61b25-48c6-4865-bc5c-f96f494872ab {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.585775] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Created folder: Project (4edac85f0a824e4798b50cf27575dfdb) in parent group-v447541. [ 784.586044] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Creating folder: Instances. Parent ref: group-v447597. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 784.586315] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-295d008f-8766-492a-aef7-9c38605fab3a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.599430] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Created folder: Instances in parent group-v447597. [ 784.599726] env[62627]: DEBUG oslo.service.loopingcall [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 784.599945] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 784.600225] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a364b1d1-8255-4551-b322-8f2537675adb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.621600] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 784.621600] env[62627]: value = "task-2194102" [ 784.621600] env[62627]: _type = "Task" [ 784.621600] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.630566] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194102, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.874600] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ddd0248c-dc8d-4154-a2a1-aa2a8e3b3e61 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Acquiring lock "refresh_cache-63e884c4-2cc1-431d-8a7f-1c4259b29e1a" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 784.874600] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ddd0248c-dc8d-4154-a2a1-aa2a8e3b3e61 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Acquired lock "refresh_cache-63e884c4-2cc1-431d-8a7f-1c4259b29e1a" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 784.875095] env[62627]: DEBUG nova.network.neutron [None req-ddd0248c-dc8d-4154-a2a1-aa2a8e3b3e61 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 785.018526] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': task-2194099, 'name': Rename_Task, 'duration_secs': 0.159952} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.018650] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 76fb58dc-df20-464b-bda0-dcf09d69c718] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 785.018836] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5813a793-7f04-4be6-a5ce-39ebdb476238 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.025937] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Waiting for the task: (returnval){ [ 785.025937] env[62627]: value = "task-2194103" [ 785.025937] env[62627]: _type = "Task" [ 785.025937] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.030452] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Acquiring lock "refresh_cache-83998771-97a7-4458-834e-df1e4b2914ee" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 785.030452] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Acquired lock "refresh_cache-83998771-97a7-4458-834e-df1e4b2914ee" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 785.030452] env[62627]: DEBUG nova.network.neutron [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 785.034882] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': task-2194103, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.056368] env[62627]: DEBUG nova.compute.utils [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 785.060055] env[62627]: DEBUG nova.compute.manager [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Allocating IP information in the background. 
{{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 785.060147] env[62627]: DEBUG nova.network.neutron [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 785.116228] env[62627]: DEBUG nova.policy [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '137df2297d4e4317ba82a3c6faf09bc4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b12d6c9026644c9cb45d687b92161121', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 785.132874] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194102, 'name': CreateVM_Task, 'duration_secs': 0.378598} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.132874] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 785.133366] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 785.133530] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 785.133848] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 785.134944] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b62c29bd-e9f6-4787-a599-80403adf4c64 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.140290] env[62627]: DEBUG oslo_vmware.api [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Waiting for the task: (returnval){ [ 785.140290] env[62627]: value = 
"session[520bf719-dc86-5bda-f3a2-48a743b520b7]521a0c9f-efae-68dd-242e-7ef034827103" [ 785.140290] env[62627]: _type = "Task" [ 785.140290] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.149690] env[62627]: DEBUG oslo_vmware.api [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]521a0c9f-efae-68dd-242e-7ef034827103, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.401819] env[62627]: DEBUG nova.network.neutron [None req-ddd0248c-dc8d-4154-a2a1-aa2a8e3b3e61 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 785.502885] env[62627]: DEBUG nova.network.neutron [None req-ddd0248c-dc8d-4154-a2a1-aa2a8e3b3e61 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 785.544412] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': task-2194103, 'name': PowerOnVM_Task} progress is 37%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.560830] env[62627]: DEBUG nova.compute.manager [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 785.610195] env[62627]: DEBUG nova.network.neutron [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 785.655899] env[62627]: DEBUG oslo_vmware.api [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]521a0c9f-efae-68dd-242e-7ef034827103, 'name': SearchDatastore_Task, 'duration_secs': 0.011186} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.658672] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 785.658921] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 785.659178] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 785.659555] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 785.660064] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 785.660523] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f2d411b0-5473-4e94-bf4f-d8ad3e7f08c9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.673518] env[62627]: DEBUG nova.network.neutron [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Successfully created port: bbeb1b12-c6f2-470f-a952-37da5b4db0c9 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 785.685378] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 785.685728] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 785.686451] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ddb406c-a154-4e4d-9be4-60a1c6421bd1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.697666] env[62627]: DEBUG oslo_vmware.api [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Waiting for the task: (returnval){ [ 785.697666] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52143feb-cc58-cd16-ae64-ee53ff70ae99" [ 785.697666] env[62627]: _type = "Task" [ 785.697666] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.707112] env[62627]: DEBUG oslo_vmware.api [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52143feb-cc58-cd16-ae64-ee53ff70ae99, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.942492] env[62627]: DEBUG nova.network.neutron [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] Updating instance_info_cache with network_info: [{"id": "33dc8116-8cea-41e8-8ac1-a7f42c7bca81", "address": "fa:16:3e:ed:09:92", "network": {"id": "cc0a3a8a-1222-4362-a164-11bcef0a8bc1", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1517256335-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "01b0f3fe63f64e1183acc0fec83bb3f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52f465cb-7418-4172-bd7d-aec00abeb692", "external-id": "nsx-vlan-transportzone-895", "segmentation_id": 895, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap33dc8116-8c", "ovs_interfaceid": "33dc8116-8cea-41e8-8ac1-a7f42c7bca81", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 786.005887] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ddd0248c-dc8d-4154-a2a1-aa2a8e3b3e61 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Releasing lock "refresh_cache-63e884c4-2cc1-431d-8a7f-1c4259b29e1a" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 786.009019] env[62627]: DEBUG nova.compute.manager [None req-ddd0248c-dc8d-4154-a2a1-aa2a8e3b3e61 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Start destroying the 
instance on the hypervisor. {{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 786.009019] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ddd0248c-dc8d-4154-a2a1-aa2a8e3b3e61 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 786.009019] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dfde1e3-bfcf-4bea-869d-86d915b79362 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.024509] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-ddd0248c-dc8d-4154-a2a1-aa2a8e3b3e61 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 786.024812] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-45041122-7e4f-4150-a521-cf9e2858ccf9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.036950] env[62627]: DEBUG oslo_vmware.api [None req-ddd0248c-dc8d-4154-a2a1-aa2a8e3b3e61 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Waiting for the task: (returnval){ [ 786.036950] env[62627]: value = "task-2194104" [ 786.036950] env[62627]: _type = "Task" [ 786.036950] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.041997] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': task-2194103, 'name': PowerOnVM_Task} progress is 82%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.059226] env[62627]: DEBUG oslo_vmware.api [None req-ddd0248c-dc8d-4154-a2a1-aa2a8e3b3e61 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Task: {'id': task-2194104, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.211095] env[62627]: DEBUG oslo_vmware.api [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52143feb-cc58-cd16-ae64-ee53ff70ae99, 'name': SearchDatastore_Task, 'duration_secs': 0.014874} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.211878] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6c9b567-d148-40e8-97a6-d798d9f7c9b8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.215660] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d88e14c-dabd-4767-97b7-ae50daa9e2ad {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.224015] env[62627]: DEBUG oslo_vmware.api [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Waiting for the task: (returnval){ [ 786.224015] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]521870b6-aac1-907c-e2fd-93691a0c623e" [ 786.224015] env[62627]: _type = "Task" [ 786.224015] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.225238] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03ee8318-18c5-4f75-b93e-fa3239f98a88 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.242835] env[62627]: DEBUG oslo_vmware.api [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]521870b6-aac1-907c-e2fd-93691a0c623e, 'name': SearchDatastore_Task, 'duration_secs': 0.013532} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.266281] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 786.266562] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] fffa23fe-3ce2-4879-b72d-11275d298504/fffa23fe-3ce2-4879-b72d-11275d298504.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 786.267398] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b0ce5d3e-3690-4b7c-98cd-0cba7619720e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.269877] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e26cd2a-a4c8-4ce3-bbe6-cd4b2fbdc43a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.279855] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8e73bcd-f7c2-483d-91d2-efd61ab713d5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.285683] env[62627]: DEBUG oslo_vmware.api [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Waiting for the task: (returnval){ [ 786.285683] env[62627]: value = "task-2194105" [ 786.285683] env[62627]: _type = "Task" [ 786.285683] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.297142] env[62627]: DEBUG nova.compute.provider_tree [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 786.305049] env[62627]: DEBUG oslo_vmware.api [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Task: {'id': task-2194105, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.445461] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Releasing lock "refresh_cache-83998771-97a7-4458-834e-df1e4b2914ee" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 786.445617] env[62627]: DEBUG nova.compute.manager [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] Instance network_info: |[{"id": "33dc8116-8cea-41e8-8ac1-a7f42c7bca81", "address": "fa:16:3e:ed:09:92", "network": {"id": "cc0a3a8a-1222-4362-a164-11bcef0a8bc1", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1517256335-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "01b0f3fe63f64e1183acc0fec83bb3f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52f465cb-7418-4172-bd7d-aec00abeb692", "external-id": "nsx-vlan-transportzone-895", "segmentation_id": 895, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap33dc8116-8c", "ovs_interfaceid": "33dc8116-8cea-41e8-8ac1-a7f42c7bca81", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 786.445975] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ed:09:92', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '52f465cb-7418-4172-bd7d-aec00abeb692', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '33dc8116-8cea-41e8-8ac1-a7f42c7bca81', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 786.456284] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Creating folder: Project (01b0f3fe63f64e1183acc0fec83bb3f5). Parent ref: group-v447541. 
{{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 786.457742] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a730913e-f1dd-48c0-b884-e7f1565c490c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.461268] env[62627]: DEBUG nova.compute.manager [req-e9feffe8-205c-40ac-b017-7ca41bcbd142 req-2ca66222-0da9-4e40-98d2-943ee763da39 service nova] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] Received event network-changed-33dc8116-8cea-41e8-8ac1-a7f42c7bca81 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 786.461458] env[62627]: DEBUG nova.compute.manager [req-e9feffe8-205c-40ac-b017-7ca41bcbd142 req-2ca66222-0da9-4e40-98d2-943ee763da39 service nova] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] Refreshing instance network info cache due to event network-changed-33dc8116-8cea-41e8-8ac1-a7f42c7bca81. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 786.461669] env[62627]: DEBUG oslo_concurrency.lockutils [req-e9feffe8-205c-40ac-b017-7ca41bcbd142 req-2ca66222-0da9-4e40-98d2-943ee763da39 service nova] Acquiring lock "refresh_cache-83998771-97a7-4458-834e-df1e4b2914ee" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 786.461811] env[62627]: DEBUG oslo_concurrency.lockutils [req-e9feffe8-205c-40ac-b017-7ca41bcbd142 req-2ca66222-0da9-4e40-98d2-943ee763da39 service nova] Acquired lock "refresh_cache-83998771-97a7-4458-834e-df1e4b2914ee" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 786.461967] env[62627]: DEBUG nova.network.neutron [req-e9feffe8-205c-40ac-b017-7ca41bcbd142 req-2ca66222-0da9-4e40-98d2-943ee763da39 service nova] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] Refreshing network info cache for port 33dc8116-8cea-41e8-8ac1-a7f42c7bca81 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 786.476481] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Created folder: Project (01b0f3fe63f64e1183acc0fec83bb3f5) in parent group-v447541. [ 786.476645] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Creating folder: Instances. Parent ref: group-v447600. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 786.476786] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7a54545e-2837-44a1-a1a3-a64ff9d90443 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.489091] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Created folder: Instances in parent group-v447600. [ 786.489333] env[62627]: DEBUG oslo.service.loopingcall [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 786.489523] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 786.489722] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d50ebbe5-5d61-4cb5-8bcd-9fa9eefe263a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.509472] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 786.509472] env[62627]: value = "task-2194108" [ 786.509472] env[62627]: _type = "Task" [ 786.509472] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.517926] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194108, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.539336] env[62627]: DEBUG oslo_vmware.api [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': task-2194103, 'name': PowerOnVM_Task, 'duration_secs': 1.065059} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.539598] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 76fb58dc-df20-464b-bda0-dcf09d69c718] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 786.539790] env[62627]: INFO nova.compute.manager [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 76fb58dc-df20-464b-bda0-dcf09d69c718] Took 9.41 seconds to spawn the instance on the hypervisor. [ 786.539948] env[62627]: DEBUG nova.compute.manager [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 76fb58dc-df20-464b-bda0-dcf09d69c718] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 786.540752] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-408a41fe-1619-43d6-a9ed-e48a157a4d83 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.552333] env[62627]: DEBUG oslo_vmware.api [None req-ddd0248c-dc8d-4154-a2a1-aa2a8e3b3e61 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Task: {'id': task-2194104, 'name': PowerOffVM_Task, 'duration_secs': 0.208947} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.553641] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-ddd0248c-dc8d-4154-a2a1-aa2a8e3b3e61 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 786.553817] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ddd0248c-dc8d-4154-a2a1-aa2a8e3b3e61 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 786.556775] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cd215a7d-a068-4655-b853-b913580e5efc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.574381] env[62627]: DEBUG nova.compute.manager [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Start spawning the instance on the hypervisor. {{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 786.590165] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ddd0248c-dc8d-4154-a2a1-aa2a8e3b3e61 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 786.590444] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ddd0248c-dc8d-4154-a2a1-aa2a8e3b3e61 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 786.590651] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-ddd0248c-dc8d-4154-a2a1-aa2a8e3b3e61 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Deleting the datastore file [datastore2] 63e884c4-2cc1-431d-8a7f-1c4259b29e1a {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 786.590980] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2614d996-da3f-4285-8731-76d620852b53 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.602499] env[62627]: DEBUG oslo_vmware.api [None req-ddd0248c-dc8d-4154-a2a1-aa2a8e3b3e61 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Waiting for the task: (returnval){ [ 786.602499] env[62627]: value = "task-2194110" [ 786.602499] env[62627]: _type = "Task" [ 786.602499] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.614387] env[62627]: DEBUG oslo_vmware.api [None req-ddd0248c-dc8d-4154-a2a1-aa2a8e3b3e61 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Task: {'id': task-2194110, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.616648] env[62627]: DEBUG nova.virt.hardware [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 786.616970] env[62627]: DEBUG nova.virt.hardware [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 786.617105] env[62627]: DEBUG nova.virt.hardware [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 786.617337] env[62627]: DEBUG nova.virt.hardware [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 786.617515] env[62627]: DEBUG nova.virt.hardware [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 786.617686] env[62627]: DEBUG nova.virt.hardware [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 786.617906] env[62627]: DEBUG nova.virt.hardware [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 786.618115] env[62627]: DEBUG nova.virt.hardware [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 786.618315] env[62627]: DEBUG nova.virt.hardware [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 786.618518] env[62627]: DEBUG nova.virt.hardware [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 786.618750] env[62627]: DEBUG nova.virt.hardware [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 786.619890] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa34be90-1dc9-40eb-9b37-300cd541b292 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.629156] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da9d52d8-0978-4c25-9ecf-9071b3610c07 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.803753] env[62627]: DEBUG nova.scheduler.client.report [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 786.808908] env[62627]: DEBUG oslo_vmware.api [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Task: {'id': task-2194105, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.026676] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194108, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.066789] env[62627]: INFO nova.compute.manager [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 76fb58dc-df20-464b-bda0-dcf09d69c718] Took 35.28 seconds to build instance. [ 787.114390] env[62627]: DEBUG oslo_vmware.api [None req-ddd0248c-dc8d-4154-a2a1-aa2a8e3b3e61 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Task: {'id': task-2194110, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.430983} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.116957] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-ddd0248c-dc8d-4154-a2a1-aa2a8e3b3e61 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 787.117357] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ddd0248c-dc8d-4154-a2a1-aa2a8e3b3e61 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 787.117579] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ddd0248c-dc8d-4154-a2a1-aa2a8e3b3e61 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 787.117772] env[62627]: INFO nova.compute.manager [None req-ddd0248c-dc8d-4154-a2a1-aa2a8e3b3e61 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Took 1.11 seconds to destroy the instance on the hypervisor. [ 787.118042] env[62627]: DEBUG oslo.service.loopingcall [None req-ddd0248c-dc8d-4154-a2a1-aa2a8e3b3e61 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 787.118324] env[62627]: DEBUG nova.compute.manager [-] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 787.118381] env[62627]: DEBUG nova.network.neutron [-] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 787.139355] env[62627]: DEBUG nova.network.neutron [-] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 787.300769] env[62627]: DEBUG oslo_vmware.api [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Task: {'id': task-2194105, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.75563} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.301175] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] fffa23fe-3ce2-4879-b72d-11275d298504/fffa23fe-3ce2-4879-b72d-11275d298504.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 787.301500] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 787.301805] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ab42dc62-7f03-4106-852b-fd08cad09fc7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.310206] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.764s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 787.310850] env[62627]: DEBUG nova.compute.manager [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 787.313702] env[62627]: DEBUG oslo_vmware.api [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Waiting for the task: (returnval){ [ 787.313702] env[62627]: value = "task-2194111" [ 787.313702] env[62627]: _type = "Task" [ 787.313702] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.317301] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.883s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 787.317301] env[62627]: INFO nova.compute.claims [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] [instance: 16c4065b-83cf-4b1c-92bf-66ce0a75e573] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 787.334956] env[62627]: DEBUG oslo_vmware.api [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Task: {'id': task-2194111, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.357654] env[62627]: DEBUG nova.network.neutron [req-e9feffe8-205c-40ac-b017-7ca41bcbd142 req-2ca66222-0da9-4e40-98d2-943ee763da39 service nova] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] Updated VIF entry in instance network info cache for port 33dc8116-8cea-41e8-8ac1-a7f42c7bca81. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 787.358047] env[62627]: DEBUG nova.network.neutron [req-e9feffe8-205c-40ac-b017-7ca41bcbd142 req-2ca66222-0da9-4e40-98d2-943ee763da39 service nova] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] Updating instance_info_cache with network_info: [{"id": "33dc8116-8cea-41e8-8ac1-a7f42c7bca81", "address": "fa:16:3e:ed:09:92", "network": {"id": "cc0a3a8a-1222-4362-a164-11bcef0a8bc1", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1517256335-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "01b0f3fe63f64e1183acc0fec83bb3f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52f465cb-7418-4172-bd7d-aec00abeb692", "external-id": "nsx-vlan-transportzone-895", "segmentation_id": 895, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap33dc8116-8c", "ovs_interfaceid": "33dc8116-8cea-41e8-8ac1-a7f42c7bca81", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 787.523546] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194108, 'name': CreateVM_Task, 'duration_secs': 0.515732} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.523923] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 787.524559] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 787.524761] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 787.525471] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 787.526209] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7c073b3e-73bb-46e0-b678-a676289837a8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.531256] env[62627]: DEBUG oslo_vmware.api [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the task: (returnval){ [ 787.531256] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]527312de-77df-ef91-123a-ef3c16a38bd9" [ 787.531256] env[62627]: _type = "Task" [ 787.531256] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.545352] env[62627]: DEBUG oslo_vmware.api [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]527312de-77df-ef91-123a-ef3c16a38bd9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.569277] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7e3fd573-9e49-4953-8745-60fbe2306a32 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Lock "76fb58dc-df20-464b-bda0-dcf09d69c718" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 50.195s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 787.585652] env[62627]: DEBUG nova.network.neutron [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Successfully updated port: bbeb1b12-c6f2-470f-a952-37da5b4db0c9 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 787.642417] env[62627]: DEBUG nova.network.neutron [-] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 787.691509] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8a65011e-78d9-423f-b028-9afc812ebe7c tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Acquiring lock "15799515-ab7d-4e7c-a6e0-c740baf4f93a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 787.691509] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8a65011e-78d9-423f-b028-9afc812ebe7c tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Lock "15799515-ab7d-4e7c-a6e0-c740baf4f93a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 787.691672] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8a65011e-78d9-423f-b028-9afc812ebe7c tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Acquiring lock "15799515-ab7d-4e7c-a6e0-c740baf4f93a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 787.691855] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8a65011e-78d9-423f-b028-9afc812ebe7c tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Lock "15799515-ab7d-4e7c-a6e0-c740baf4f93a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 787.693839] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8a65011e-78d9-423f-b028-9afc812ebe7c tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Lock "15799515-ab7d-4e7c-a6e0-c740baf4f93a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 787.697361] env[62627]: INFO nova.compute.manager [None req-8a65011e-78d9-423f-b028-9afc812ebe7c tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 15799515-ab7d-4e7c-a6e0-c740baf4f93a] Terminating instance [ 787.816739] env[62627]: DEBUG nova.compute.utils [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 787.819268] env[62627]: DEBUG nova.compute.manager [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Allocating IP information in the background. {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 787.819464] env[62627]: DEBUG nova.network.neutron [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 787.832968] env[62627]: DEBUG oslo_vmware.api [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Task: {'id': task-2194111, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.177366} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.834153] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 787.834965] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87c6fdef-ce42-4a70-a323-6cef9e5ad7b2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.861024] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Reconfiguring VM instance instance-00000015 to attach disk [datastore2] fffa23fe-3ce2-4879-b72d-11275d298504/fffa23fe-3ce2-4879-b72d-11275d298504.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 787.861024] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7fa1cc3c-3871-4f11-8887-2bad0b90e491 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.874269] env[62627]: DEBUG oslo_concurrency.lockutils [req-e9feffe8-205c-40ac-b017-7ca41bcbd142 req-2ca66222-0da9-4e40-98d2-943ee763da39 service nova] Releasing lock "refresh_cache-83998771-97a7-4458-834e-df1e4b2914ee" 
{{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 787.881698] env[62627]: DEBUG oslo_vmware.api [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Waiting for the task: (returnval){ [ 787.881698] env[62627]: value = "task-2194112" [ 787.881698] env[62627]: _type = "Task" [ 787.881698] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.892112] env[62627]: DEBUG oslo_vmware.api [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Task: {'id': task-2194112, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.895569] env[62627]: DEBUG nova.policy [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2e2bfdcb7e8e4d619bb9ce5fdcf9adb0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9d36f0395b094ef4aa913b538a31f6a9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 788.046483] env[62627]: DEBUG oslo_vmware.api [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]527312de-77df-ef91-123a-ef3c16a38bd9, 'name': SearchDatastore_Task, 'duration_secs': 0.026067} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.047975] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 788.047975] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 788.047975] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 788.047975] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 788.048472] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 788.048472] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3041149a-f8d0-46f7-b4b6-a2ddcc88c849 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.063662] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 788.063843] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 788.064821] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eb7fff48-b7d0-484d-892b-4b1dc4c8d7fe {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.074143] env[62627]: DEBUG oslo_vmware.api [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the task: (returnval){ [ 788.074143] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]528472c1-7b4c-11df-5a73-babd1b8b351a" [ 788.074143] env[62627]: _type = "Task" [ 788.074143] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.077478] env[62627]: DEBUG nova.compute.manager [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] [instance: 95ac4fbf-071a-403e-8cae-88d9899757ba] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 788.086276] env[62627]: DEBUG oslo_vmware.api [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]528472c1-7b4c-11df-5a73-babd1b8b351a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.089020] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Acquiring lock "refresh_cache-2cfcbc3b-c2b9-4155-a48a-0e73eea0589e" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 788.089020] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Acquired lock "refresh_cache-2cfcbc3b-c2b9-4155-a48a-0e73eea0589e" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 788.089020] env[62627]: DEBUG nova.network.neutron [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 788.145631] env[62627]: INFO nova.compute.manager [-] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Took 1.03 seconds to deallocate network for instance. [ 788.200848] env[62627]: DEBUG nova.compute.manager [None req-8a65011e-78d9-423f-b028-9afc812ebe7c tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 15799515-ab7d-4e7c-a6e0-c740baf4f93a] Start destroying the instance on the hypervisor. 
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 788.200848] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8a65011e-78d9-423f-b028-9afc812ebe7c tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 15799515-ab7d-4e7c-a6e0-c740baf4f93a] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 788.202398] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-628327b8-d323-495e-af4a-32fc40844998 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.213827] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a65011e-78d9-423f-b028-9afc812ebe7c tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 15799515-ab7d-4e7c-a6e0-c740baf4f93a] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 788.213827] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-63c32ba5-189c-47a3-93d5-53bd7fd7954a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.222321] env[62627]: DEBUG oslo_vmware.api [None req-8a65011e-78d9-423f-b028-9afc812ebe7c tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Waiting for the task: (returnval){ [ 788.222321] env[62627]: value = "task-2194113" [ 788.222321] env[62627]: _type = "Task" [ 788.222321] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.231239] env[62627]: DEBUG oslo_vmware.api [None req-8a65011e-78d9-423f-b028-9afc812ebe7c tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': task-2194113, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.322783] env[62627]: DEBUG nova.compute.manager [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 788.326250] env[62627]: DEBUG nova.network.neutron [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Successfully created port: 5db92929-a96d-41c2-be2b-95c9ef62c4bf {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 788.399744] env[62627]: DEBUG oslo_vmware.api [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Task: {'id': task-2194112, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.594979] env[62627]: DEBUG oslo_vmware.api [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]528472c1-7b4c-11df-5a73-babd1b8b351a, 'name': SearchDatastore_Task, 'duration_secs': 0.031756} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.603045] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f664137a-2a37-477d-94ce-dc87c90344d5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.615727] env[62627]: DEBUG oslo_vmware.api [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the task: (returnval){ [ 788.615727] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5260b5fb-26f8-871d-16ed-491d2bce2d59" [ 788.615727] env[62627]: _type = "Task" [ 788.615727] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.623630] env[62627]: DEBUG oslo_concurrency.lockutils [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 788.634653] env[62627]: DEBUG oslo_vmware.api [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5260b5fb-26f8-871d-16ed-491d2bce2d59, 'name': SearchDatastore_Task, 'duration_secs': 0.014384} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.634653] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 788.634832] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 83998771-97a7-4458-834e-df1e4b2914ee/83998771-97a7-4458-834e-df1e4b2914ee.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 788.635034] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9f5c927b-0a68-4cb8-9f7d-3eb4cbb1a031 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.643292] env[62627]: DEBUG nova.compute.manager [req-db8ee5dd-3644-4b8d-98b9-b6249f014651 req-b3c9907b-71fc-4741-b4f1-9789c719d93c service nova] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Received event network-vif-plugged-bbeb1b12-c6f2-470f-a952-37da5b4db0c9 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 788.643587] env[62627]: DEBUG oslo_concurrency.lockutils [req-db8ee5dd-3644-4b8d-98b9-b6249f014651 req-b3c9907b-71fc-4741-b4f1-9789c719d93c service nova] Acquiring lock "2cfcbc3b-c2b9-4155-a48a-0e73eea0589e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 788.643860] env[62627]: DEBUG oslo_concurrency.lockutils [req-db8ee5dd-3644-4b8d-98b9-b6249f014651 req-b3c9907b-71fc-4741-b4f1-9789c719d93c service nova] Lock "2cfcbc3b-c2b9-4155-a48a-0e73eea0589e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 788.644062] env[62627]: DEBUG oslo_concurrency.lockutils [req-db8ee5dd-3644-4b8d-98b9-b6249f014651 req-b3c9907b-71fc-4741-b4f1-9789c719d93c service nova] Lock "2cfcbc3b-c2b9-4155-a48a-0e73eea0589e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 788.644288] env[62627]: DEBUG nova.compute.manager [req-db8ee5dd-3644-4b8d-98b9-b6249f014651 req-b3c9907b-71fc-4741-b4f1-9789c719d93c service nova] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] No waiting events found dispatching network-vif-plugged-bbeb1b12-c6f2-470f-a952-37da5b4db0c9 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 788.644471] env[62627]: WARNING nova.compute.manager [req-db8ee5dd-3644-4b8d-98b9-b6249f014651 req-b3c9907b-71fc-4741-b4f1-9789c719d93c service nova] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Received unexpected event network-vif-plugged-bbeb1b12-c6f2-470f-a952-37da5b4db0c9 for instance 
with vm_state building and task_state spawning. [ 788.644650] env[62627]: DEBUG nova.compute.manager [req-db8ee5dd-3644-4b8d-98b9-b6249f014651 req-b3c9907b-71fc-4741-b4f1-9789c719d93c service nova] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Received event network-changed-bbeb1b12-c6f2-470f-a952-37da5b4db0c9 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 788.644861] env[62627]: DEBUG nova.compute.manager [req-db8ee5dd-3644-4b8d-98b9-b6249f014651 req-b3c9907b-71fc-4741-b4f1-9789c719d93c service nova] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Refreshing instance network info cache due to event network-changed-bbeb1b12-c6f2-470f-a952-37da5b4db0c9. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 788.644975] env[62627]: DEBUG oslo_concurrency.lockutils [req-db8ee5dd-3644-4b8d-98b9-b6249f014651 req-b3c9907b-71fc-4741-b4f1-9789c719d93c service nova] Acquiring lock "refresh_cache-2cfcbc3b-c2b9-4155-a48a-0e73eea0589e" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 788.649101] env[62627]: DEBUG oslo_vmware.api [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the task: (returnval){ [ 788.649101] env[62627]: value = "task-2194114" [ 788.649101] env[62627]: _type = "Task" [ 788.649101] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.656958] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ddd0248c-dc8d-4154-a2a1-aa2a8e3b3e61 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 788.666693] env[62627]: DEBUG oslo_vmware.api [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194114, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.701604] env[62627]: DEBUG nova.network.neutron [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 788.735921] env[62627]: DEBUG oslo_vmware.api [None req-8a65011e-78d9-423f-b028-9afc812ebe7c tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': task-2194113, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.899410] env[62627]: DEBUG oslo_vmware.api [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Task: {'id': task-2194112, 'name': ReconfigVM_Task, 'duration_secs': 0.992334} completed successfully. 
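The "Acquiring lock ... by ...", "acquired ... waited 0.000s" and ""released" ... held 0.000s" lines around the per-instance events cache come from oslo.concurrency's named-lock helpers (lockutils.py in the quoted paths). Below is a stdlib-only sketch of that behaviour as it appears in the log — one in-process lock per name, with the waited/held timings reported on acquire and release; the helper name named_lock and its arguments are illustrative, not the library's API.

```python
# Hedged sketch of the named-lock pattern behind the "Acquiring lock ... /
# acquired ... waited Ns / released ... held Ns" lines in the log. This is a
# stdlib-only illustration of the observed behaviour, not the
# oslo_concurrency.lockutils implementation itself.
import contextlib
import threading
import time

_locks = {}                       # one threading.Lock per lock name
_registry_guard = threading.Lock()


@contextlib.contextmanager
def named_lock(name, owner):
    with _registry_guard:
        lock = _locks.setdefault(name, threading.Lock())
    t0 = time.monotonic()
    print(f'Acquiring lock "{name}" by "{owner}"')
    with lock:
        waited = time.monotonic() - t0
        print(f'Lock "{name}" acquired by "{owner}" :: waited {waited:.3f}s')
        t1 = time.monotonic()
        try:
            yield
        finally:
            held = time.monotonic() - t1
            print(f'Lock "{name}" "released" by "{owner}" :: held {held:.3f}s')


if __name__ == "__main__":
    with named_lock("2cfcbc3b-c2b9-4155-a48a-0e73eea0589e-events",
                    "pop_instance_event"):
        pass  # critical section, e.g. mutating the per-instance event list
```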
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.902423] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Reconfigured VM instance instance-00000015 to attach disk [datastore2] fffa23fe-3ce2-4879-b72d-11275d298504/fffa23fe-3ce2-4879-b72d-11275d298504.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 788.904538] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2f7c64e1-155b-41ae-86ba-d7ec9b1b25ba {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.915334] env[62627]: DEBUG oslo_vmware.api [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Waiting for the task: (returnval){ [ 788.915334] env[62627]: value = "task-2194115" [ 788.915334] env[62627]: _type = "Task" [ 788.915334] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.931748] env[62627]: DEBUG oslo_vmware.api [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Task: {'id': task-2194115, 'name': Rename_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.084114] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c335579-93c6-476c-a8a1-fe5c5c3b487d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.094131] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e3dca89-a869-419b-af4d-a6266caebdde {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.129302] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59e12bdf-5080-4d99-8a91-a32e0b877962 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.137975] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-797e540b-6daa-42ce-8056-76d2f53017fd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.154746] env[62627]: DEBUG nova.compute.provider_tree [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 789.174099] env[62627]: DEBUG oslo_vmware.api [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194114, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.234680] env[62627]: DEBUG oslo_vmware.api [None req-8a65011e-78d9-423f-b028-9afc812ebe7c tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': task-2194113, 'name': PowerOffVM_Task, 'duration_secs': 0.671249} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.235448] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a65011e-78d9-423f-b028-9afc812ebe7c tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 15799515-ab7d-4e7c-a6e0-c740baf4f93a] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 789.235686] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8a65011e-78d9-423f-b028-9afc812ebe7c tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 15799515-ab7d-4e7c-a6e0-c740baf4f93a] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 789.236015] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-97f29a27-e393-48e8-9753-fbb1dd6a6e3f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.270425] env[62627]: DEBUG nova.network.neutron [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Updating instance_info_cache with network_info: [{"id": "bbeb1b12-c6f2-470f-a952-37da5b4db0c9", "address": "fa:16:3e:d9:ed:be", "network": {"id": "5d65934a-2966-4094-bc55-817726caaea5", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1246247596-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b12d6c9026644c9cb45d687b92161121", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbbeb1b12-c6", "ovs_interfaceid": "bbeb1b12-c6f2-470f-a952-37da5b4db0c9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 789.344974] env[62627]: DEBUG nova.compute.manager [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Start spawning the instance on the hypervisor. 
{{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 789.377590] env[62627]: DEBUG nova.virt.hardware [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 789.378254] env[62627]: DEBUG nova.virt.hardware [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 789.378668] env[62627]: DEBUG nova.virt.hardware [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 789.379132] env[62627]: DEBUG nova.virt.hardware [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 789.379513] env[62627]: DEBUG nova.virt.hardware [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 789.379900] env[62627]: DEBUG nova.virt.hardware [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 789.382021] env[62627]: DEBUG nova.virt.hardware [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 789.382021] env[62627]: DEBUG nova.virt.hardware [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 789.382021] env[62627]: DEBUG nova.virt.hardware [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 789.382021] env[62627]: DEBUG nova.virt.hardware [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 789.382021] env[62627]: DEBUG nova.virt.hardware [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 789.383170] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ae1e3f3-efa2-4043-91d0-7a3203546b81 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.397031] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69db0409-0a2b-4afd-bdab-6d20b9258e1d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.432817] env[62627]: DEBUG oslo_vmware.api [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Task: {'id': task-2194115, 'name': Rename_Task, 'duration_secs': 0.394122} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.433424] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 789.433913] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-501dba45-f332-4183-b933-92e02ec2dc23 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.445024] env[62627]: DEBUG oslo_vmware.api [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Waiting for the task: (returnval){ [ 789.445024] env[62627]: value = "task-2194117" [ 789.445024] env[62627]: _type = "Task" [ 789.445024] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.458113] env[62627]: DEBUG oslo_vmware.api [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Task: {'id': task-2194117, 'name': PowerOnVM_Task} progress is 0%. 
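The nova.virt.hardware entries above take the flavor and image limits (all 0, i.e. unconstrained), fall back to maxima of 65536 sockets/cores/threads, and enumerate possible topologies for 1 vCPU, ending with the single VirtCPUTopology(cores=1,sockets=1,threads=1). A simplified, hedged sketch of that enumeration follows — every (sockets, cores, threads) factorisation of the vCPU count within the per-dimension maxima; it ignores the preference ordering the real code applies.

```python
# Hedged sketch of the topology enumeration the hardware.py lines refer to:
# every (sockets, cores, threads) factorisation of the vCPU count that stays
# within the per-dimension maxima. Simplified illustration only; the real
# logic in nova/virt/hardware.py also weighs flavor/image preferences.
from collections import namedtuple

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")


def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    topologies = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        for cores in range(1, min(vcpus // sockets, max_cores) + 1):
            if (vcpus // sockets) % cores:
                continue
            threads = vcpus // (sockets * cores)
            if threads <= max_threads:
                topologies.append(VirtCPUTopology(sockets, cores, threads))
    return topologies


if __name__ == "__main__":
    # Matches the log: 1 vCPU yields the single topology (1, 1, 1).
    print(possible_topologies(1))
```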
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.668561] env[62627]: DEBUG nova.scheduler.client.report [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 789.672224] env[62627]: DEBUG oslo_vmware.api [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194114, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.545399} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.672462] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 83998771-97a7-4458-834e-df1e4b2914ee/83998771-97a7-4458-834e-df1e4b2914ee.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 789.672783] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 789.672953] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c75550e6-2ce6-45b5-b96d-d1d838a890dd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.683856] env[62627]: DEBUG oslo_vmware.api [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the task: (returnval){ [ 789.683856] env[62627]: value = "task-2194118" [ 789.683856] env[62627]: _type = "Task" [ 789.683856] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.695050] env[62627]: DEBUG oslo_vmware.api [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194118, 'name': ExtendVirtualDisk_Task} progress is 0%. 
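The scheduler report-client entry above lists the compute node's inventory per resource class (VCPU total 48 with allocation_ratio 4.0, MEMORY_MB total 196590 with 512 reserved, DISK_GB total 400 with max_unit 176). Placement's usable capacity per class is conventionally (total - reserved) * allocation_ratio, with max_unit capping any single allocation; the short sketch below applies that arithmetic to the logged numbers (the formula is the standard Placement convention, not something stated in this log).

```python
# Hedged sketch of Placement's capacity arithmetic for the inventory logged
# above: effective capacity per resource class is (total - reserved) *
# allocation_ratio, with max_unit capping any single allocation.
inventory = {
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0,
             "max_unit": 16},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0,
                  "max_unit": 65530},
    "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0,
                "max_unit": 176},
}

for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: capacity {capacity:g}, "
          f"single allocation capped at {inv['max_unit']}")
```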
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.776662] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Releasing lock "refresh_cache-2cfcbc3b-c2b9-4155-a48a-0e73eea0589e" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 789.777055] env[62627]: DEBUG nova.compute.manager [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Instance network_info: |[{"id": "bbeb1b12-c6f2-470f-a952-37da5b4db0c9", "address": "fa:16:3e:d9:ed:be", "network": {"id": "5d65934a-2966-4094-bc55-817726caaea5", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1246247596-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b12d6c9026644c9cb45d687b92161121", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbbeb1b12-c6", "ovs_interfaceid": "bbeb1b12-c6f2-470f-a952-37da5b4db0c9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 789.777397] env[62627]: DEBUG oslo_concurrency.lockutils [req-db8ee5dd-3644-4b8d-98b9-b6249f014651 req-b3c9907b-71fc-4741-b4f1-9789c719d93c service nova] Acquired lock "refresh_cache-2cfcbc3b-c2b9-4155-a48a-0e73eea0589e" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 789.777581] env[62627]: DEBUG nova.network.neutron [req-db8ee5dd-3644-4b8d-98b9-b6249f014651 req-b3c9907b-71fc-4741-b4f1-9789c719d93c service nova] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Refreshing network info cache for port bbeb1b12-c6f2-470f-a952-37da5b4db0c9 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 789.778854] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d9:ed:be', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '43ad01d2-c7dd-453c-a929-8ad76294d13c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bbeb1b12-c6f2-470f-a952-37da5b4db0c9', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 789.789135] env[62627]: DEBUG oslo.service.loopingcall [None req-8febe364-149c-4fbd-9ea6-d846a49845ef 
tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 789.790339] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 789.790406] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a3290664-519b-415f-ba88-444e955142c6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.812239] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 789.812239] env[62627]: value = "task-2194119" [ 789.812239] env[62627]: _type = "Task" [ 789.812239] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.823360] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194119, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.955718] env[62627]: DEBUG oslo_vmware.api [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Task: {'id': task-2194117, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.178640] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.863s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 790.178640] env[62627]: DEBUG nova.compute.manager [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] [instance: 16c4065b-83cf-4b1c-92bf-66ce0a75e573] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 790.180264] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9238b777-b0d4-44d3-9d4b-954d997d6046 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.477s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 790.180685] env[62627]: DEBUG nova.objects.instance [None req-9238b777-b0d4-44d3-9d4b-954d997d6046 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Lazy-loading 'resources' on Instance uuid e8c87637-4a30-4482-af21-f58ad8a218fc {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 790.196103] env[62627]: DEBUG oslo_vmware.api [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194118, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082077} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.196520] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 790.197202] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e04fb55f-3750-4309-86e9-c1f5e6f345c5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.223027] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] Reconfiguring VM instance instance-00000016 to attach disk [datastore2] 83998771-97a7-4458-834e-df1e4b2914ee/83998771-97a7-4458-834e-df1e4b2914ee.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 790.223756] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1ed55be6-d8d4-49b3-bc16-9816e3947b4c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.239219] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8a65011e-78d9-423f-b028-9afc812ebe7c tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 15799515-ab7d-4e7c-a6e0-c740baf4f93a] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 790.243037] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8a65011e-78d9-423f-b028-9afc812ebe7c tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 15799515-ab7d-4e7c-a6e0-c740baf4f93a] Deleting contents of the VM from datastore datastore1 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 790.243037] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a65011e-78d9-423f-b028-9afc812ebe7c tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Deleting the datastore file [datastore1] 15799515-ab7d-4e7c-a6e0-c740baf4f93a {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 790.243037] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3d211597-62d8-4ebd-8913-3b104ea95771 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.252019] env[62627]: DEBUG oslo_vmware.api [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the task: (returnval){ [ 790.252019] env[62627]: value = "task-2194121" [ 790.252019] env[62627]: _type = "Task" [ 790.252019] env[62627]: } to complete. 
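The build path for instance 83998771-97a7-4458-834e-df1e4b2914ee in the entries above runs: locate the cached image VMDK under devstack-image-cache_base, CopyVirtualDisk_Task it into the instance directory, ExtendVirtualDisk_Task it to the flavor's root size (1048576, presumably KB for the 1 GiB m1.nano root disk), then ReconfigVM_Task to attach it. The small hedged helper below reconstructs the two datastore paths seen in these entries; the path layout is taken from the log, while the function names are illustrative.

```python
# Hedged helper reconstructing the two datastore paths used in the
# copy/extend sequence in the log (cached image VMDK -> per-instance VMDK).
# The layout mirrors the logged paths; the functions themselves are
# illustrative, not Nova's ds_util API.
def cached_image_vmdk(datastore, image_id,
                      cache_dir="devstack-image-cache_base"):
    return f"[{datastore}] {cache_dir}/{image_id}/{image_id}.vmdk"


def instance_vmdk(datastore, instance_uuid):
    return f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"


if __name__ == "__main__":
    src = cached_image_vmdk("datastore2",
                            "d5497cf1-16e8-478a-a834-e9289b1751b7")
    dst = instance_vmdk("datastore2", "83998771-97a7-4458-834e-df1e4b2914ee")
    # The log shows CopyVirtualDisk_Task from src to dst, followed by an
    # ExtendVirtualDisk_Task that grows dst to the flavor's 1 GiB root disk.
    print(src)
    print(dst)
```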
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.252019] env[62627]: DEBUG oslo_vmware.api [None req-8a65011e-78d9-423f-b028-9afc812ebe7c tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Waiting for the task: (returnval){ [ 790.252019] env[62627]: value = "task-2194120" [ 790.252019] env[62627]: _type = "Task" [ 790.252019] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.268997] env[62627]: DEBUG oslo_vmware.api [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194121, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.270438] env[62627]: DEBUG oslo_vmware.api [None req-8a65011e-78d9-423f-b028-9afc812ebe7c tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': task-2194120, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.323888] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194119, 'name': CreateVM_Task} progress is 25%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.465892] env[62627]: DEBUG oslo_vmware.api [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Task: {'id': task-2194117, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.684335] env[62627]: DEBUG nova.compute.utils [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 790.685801] env[62627]: DEBUG nova.compute.manager [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] [instance: 16c4065b-83cf-4b1c-92bf-66ce0a75e573] Not allocating networking since 'none' was specified. {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 790.737161] env[62627]: DEBUG nova.network.neutron [req-db8ee5dd-3644-4b8d-98b9-b6249f014651 req-b3c9907b-71fc-4741-b4f1-9789c719d93c service nova] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Updated VIF entry in instance network info cache for port bbeb1b12-c6f2-470f-a952-37da5b4db0c9. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 790.737518] env[62627]: DEBUG nova.network.neutron [req-db8ee5dd-3644-4b8d-98b9-b6249f014651 req-b3c9907b-71fc-4741-b4f1-9789c719d93c service nova] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Updating instance_info_cache with network_info: [{"id": "bbeb1b12-c6f2-470f-a952-37da5b4db0c9", "address": "fa:16:3e:d9:ed:be", "network": {"id": "5d65934a-2966-4094-bc55-817726caaea5", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1246247596-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b12d6c9026644c9cb45d687b92161121", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbbeb1b12-c6", "ovs_interfaceid": "bbeb1b12-c6f2-470f-a952-37da5b4db0c9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 790.754504] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Acquiring lock "574293e3-8ec8-410f-8a30-83bc60d25d64" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 790.754861] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Lock "574293e3-8ec8-410f-8a30-83bc60d25d64" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 790.771496] env[62627]: DEBUG oslo_vmware.api [None req-8a65011e-78d9-423f-b028-9afc812ebe7c tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': task-2194120, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.776054] env[62627]: DEBUG oslo_vmware.api [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194121, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.827694] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194119, 'name': CreateVM_Task, 'duration_secs': 0.782662} completed successfully. 
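The "Instance VIF info" entry earlier in this span is derived from the network_info cache record that appears twice above (port bbeb1b12-c6f2-470f-a952-37da5b4db0c9 on the br-int bridge, backed by NSX logical switch 43ad01d2-c7dd-453c-a929-8ad76294d13c). Below is a hedged sketch of that mapping using a trimmed copy of the logged record; the field names on both sides come from the log, while the helper itself and its defaults are illustrative rather than Nova's exact code.

```python
# Hedged sketch of the mapping from a network_info cache entry (as logged) to
# the "Instance VIF info" structure that the VMware vmops build step receives.
# The input is a trimmed copy of the logged record; the helper name and the
# vif_model default are illustrative.
vif = {
    "id": "bbeb1b12-c6f2-470f-a952-37da5b4db0c9",
    "address": "fa:16:3e:d9:ed:be",
    "network": {"bridge": "br-int"},
    "type": "ovs",
    "details": {"nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c"},
    "vnic_type": "normal",
}


def vif_info_for_vmware(vif, vif_model="vmxnet3"):
    return {
        "network_name": vif["network"]["bridge"],
        "mac_address": vif["address"],
        "network_ref": {
            "type": "OpaqueNetwork",
            "network-id": vif["details"]["nsx-logical-switch-id"],
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        },
        "iface_id": vif["id"],
        "vif_model": vif_model,
    }


if __name__ == "__main__":
    # Reproduces the shape of the "Instance VIF info" entry in the log.
    print(vif_info_for_vmware(vif))
```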
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.830811] env[62627]: DEBUG nova.network.neutron [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Successfully updated port: 5db92929-a96d-41c2-be2b-95c9ef62c4bf {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 790.832766] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 790.836148] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 790.836845] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 790.837206] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 790.838312] env[62627]: DEBUG nova.compute.manager [req-4354561a-24c2-4f4a-b473-af83b5d43429 req-8e1e48f5-cb1f-457e-a546-8ff30bfb518f service nova] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Received event network-vif-plugged-5db92929-a96d-41c2-be2b-95c9ef62c4bf {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 790.838513] env[62627]: DEBUG oslo_concurrency.lockutils [req-4354561a-24c2-4f4a-b473-af83b5d43429 req-8e1e48f5-cb1f-457e-a546-8ff30bfb518f service nova] Acquiring lock "faa16f10-63f0-457c-8cd7-20a624454d86-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 790.838704] env[62627]: DEBUG oslo_concurrency.lockutils [req-4354561a-24c2-4f4a-b473-af83b5d43429 req-8e1e48f5-cb1f-457e-a546-8ff30bfb518f service nova] Lock "faa16f10-63f0-457c-8cd7-20a624454d86-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 790.838879] env[62627]: DEBUG oslo_concurrency.lockutils [req-4354561a-24c2-4f4a-b473-af83b5d43429 req-8e1e48f5-cb1f-457e-a546-8ff30bfb518f service nova] Lock "faa16f10-63f0-457c-8cd7-20a624454d86-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 790.839072] env[62627]: DEBUG nova.compute.manager [req-4354561a-24c2-4f4a-b473-af83b5d43429 req-8e1e48f5-cb1f-457e-a546-8ff30bfb518f service nova] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] No waiting events found dispatching network-vif-plugged-5db92929-a96d-41c2-be2b-95c9ef62c4bf {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 790.839251] env[62627]: WARNING nova.compute.manager [req-4354561a-24c2-4f4a-b473-af83b5d43429 req-8e1e48f5-cb1f-457e-a546-8ff30bfb518f service nova] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Received unexpected event network-vif-plugged-5db92929-a96d-41c2-be2b-95c9ef62c4bf for instance with vm_state building and task_state spawning. [ 790.839784] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-575d81e0-dda5-4f8e-b3ad-1ddefe3a8850 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.848362] env[62627]: DEBUG oslo_vmware.api [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Waiting for the task: (returnval){ [ 790.848362] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d46ab9-12aa-344c-c20d-f6d29a4785c5" [ 790.848362] env[62627]: _type = "Task" [ 790.848362] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.862394] env[62627]: DEBUG oslo_vmware.api [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d46ab9-12aa-344c-c20d-f6d29a4785c5, 'name': SearchDatastore_Task, 'duration_secs': 0.01214} completed successfully. 
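The external_instance_event lines above show the two outcomes for an incoming Neutron notification: if nothing is waiting for it, Nova logs "No waiting events found dispatching ..." and then warns about the unexpected network-vif-plugged event; otherwise a waiting builder is woken. Below is a stdlib sketch of that dispatch pattern; the registry class and method names are illustrative, not Nova's InstanceEvents implementation.

```python
# Hedged sketch of the event dispatch behind the "No waiting events found
# dispatching ... / Received unexpected event ..." lines in the log: a
# registry of events that builders are waiting on, keyed by
# (instance, event name). Names are illustrative.
import threading


class InstanceEventRegistry:
    def __init__(self):
        self._waiters = {}   # (instance_uuid, event_name) -> threading.Event

    def expect(self, instance_uuid, event_name):
        """Called by the code that will later wait for an external event."""
        waiter = threading.Event()
        self._waiters[(instance_uuid, event_name)] = waiter
        return waiter

    def dispatch(self, instance_uuid, event_name):
        """Called when a notification (e.g. network-vif-plugged) arrives."""
        waiter = self._waiters.pop((instance_uuid, event_name), None)
        if waiter is None:
            print(f"WARNING: unexpected event {event_name} "
                  f"for instance {instance_uuid}")
            return False
        waiter.set()
        return True


if __name__ == "__main__":
    registry = InstanceEventRegistry()
    # Nothing registered yet, so this mirrors the WARNING in the log.
    registry.dispatch(
        "faa16f10-63f0-457c-8cd7-20a624454d86",
        "network-vif-plugged-5db92929-a96d-41c2-be2b-95c9ef62c4bf")
```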
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.862690] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 790.862924] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 790.863170] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 790.863341] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 790.863530] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 790.864098] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c9983293-5ec8-48ef-9d39-dfd4cbdd38d4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.875913] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 790.876113] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 790.876835] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8c44b86c-0485-4ac9-a309-b32671917dd0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.886089] env[62627]: DEBUG oslo_vmware.api [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Waiting for the task: (returnval){ [ 790.886089] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52f4e05c-8ef3-21c9-7ff5-a00468a3b92d" [ 790.886089] env[62627]: _type = "Task" [ 790.886089] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.894335] env[62627]: DEBUG oslo_vmware.api [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52f4e05c-8ef3-21c9-7ff5-a00468a3b92d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.961056] env[62627]: DEBUG oslo_vmware.api [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Task: {'id': task-2194117, 'name': PowerOnVM_Task, 'duration_secs': 1.077712} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.961214] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 790.961420] env[62627]: INFO nova.compute.manager [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Took 10.06 seconds to spawn the instance on the hypervisor. [ 790.961597] env[62627]: DEBUG nova.compute.manager [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 790.962383] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e06eb2fc-d175-45fe-95e6-327e813a2af0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.189820] env[62627]: DEBUG nova.compute.manager [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] [instance: 16c4065b-83cf-4b1c-92bf-66ce0a75e573] Start building block device mappings for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 791.222097] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-630ad863-cc17-4443-8be8-f72461c32324 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.229330] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-930fe6e6-b38d-4506-9649-f4e4d3ab2f5a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.265695] env[62627]: DEBUG oslo_concurrency.lockutils [req-db8ee5dd-3644-4b8d-98b9-b6249f014651 req-b3c9907b-71fc-4741-b4f1-9789c719d93c service nova] Releasing lock "refresh_cache-2cfcbc3b-c2b9-4155-a48a-0e73eea0589e" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 791.308560] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c524b4e7-52ac-40e9-b9ca-84b23ce3636f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.308560] env[62627]: DEBUG oslo_vmware.api [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194121, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.308560] env[62627]: DEBUG oslo_vmware.api [None req-8a65011e-78d9-423f-b028-9afc812ebe7c tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': task-2194120, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.612963} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.308560] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8efe9c2-fb9b-4147-90e7-827668b78590 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.312102] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a65011e-78d9-423f-b028-9afc812ebe7c tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 791.312312] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8a65011e-78d9-423f-b028-9afc812ebe7c tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 15799515-ab7d-4e7c-a6e0-c740baf4f93a] Deleted contents of the VM from datastore datastore1 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 791.312495] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8a65011e-78d9-423f-b028-9afc812ebe7c tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 15799515-ab7d-4e7c-a6e0-c740baf4f93a] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 791.312694] env[62627]: INFO nova.compute.manager [None req-8a65011e-78d9-423f-b028-9afc812ebe7c tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 15799515-ab7d-4e7c-a6e0-c740baf4f93a] Took 3.11 seconds to destroy the instance on the hypervisor. [ 791.312906] env[62627]: DEBUG oslo.service.loopingcall [None req-8a65011e-78d9-423f-b028-9afc812ebe7c tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 791.314063] env[62627]: DEBUG nova.compute.manager [-] [instance: 15799515-ab7d-4e7c-a6e0-c740baf4f93a] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 791.314171] env[62627]: DEBUG nova.network.neutron [-] [instance: 15799515-ab7d-4e7c-a6e0-c740baf4f93a] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 791.331181] env[62627]: DEBUG nova.compute.provider_tree [None req-9238b777-b0d4-44d3-9d4b-954d997d6046 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 791.336666] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Acquiring lock "refresh_cache-faa16f10-63f0-457c-8cd7-20a624454d86" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 791.336921] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Acquired lock "refresh_cache-faa16f10-63f0-457c-8cd7-20a624454d86" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 791.337784] env[62627]: DEBUG nova.network.neutron [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 791.396927] env[62627]: DEBUG oslo_vmware.api [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52f4e05c-8ef3-21c9-7ff5-a00468a3b92d, 'name': SearchDatastore_Task, 'duration_secs': 0.02611} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.397705] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4c61c862-bb56-4707-a932-ae0f4cd212ec {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.403431] env[62627]: DEBUG oslo_vmware.api [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Waiting for the task: (returnval){ [ 791.403431] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52996503-1337-2648-72ca-9c4ddef04287" [ 791.403431] env[62627]: _type = "Task" [ 791.403431] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.411704] env[62627]: DEBUG oslo_vmware.api [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52996503-1337-2648-72ca-9c4ddef04287, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.491140] env[62627]: INFO nova.compute.manager [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Took 30.48 seconds to build instance. [ 791.777447] env[62627]: DEBUG oslo_vmware.api [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194121, 'name': ReconfigVM_Task, 'duration_secs': 1.323689} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.777735] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] Reconfigured VM instance instance-00000016 to attach disk [datastore2] 83998771-97a7-4458-834e-df1e4b2914ee/83998771-97a7-4458-834e-df1e4b2914ee.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 791.778874] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ac0c03f7-ef22-4a2a-a930-14da1418568c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.783948] env[62627]: INFO nova.compute.manager [None req-9af71126-930a-484c-899d-20b8145fd33b tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Rescuing [ 791.784222] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9af71126-930a-484c-899d-20b8145fd33b tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Acquiring lock "refresh_cache-fffa23fe-3ce2-4879-b72d-11275d298504" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 791.784384] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9af71126-930a-484c-899d-20b8145fd33b tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Acquired lock "refresh_cache-fffa23fe-3ce2-4879-b72d-11275d298504" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 791.784599] env[62627]: DEBUG nova.network.neutron [None req-9af71126-930a-484c-899d-20b8145fd33b tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 791.789046] env[62627]: DEBUG oslo_vmware.api [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 
tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the task: (returnval){ [ 791.789046] env[62627]: value = "task-2194122" [ 791.789046] env[62627]: _type = "Task" [ 791.789046] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.796801] env[62627]: DEBUG oslo_vmware.api [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194122, 'name': Rename_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.836217] env[62627]: DEBUG nova.scheduler.client.report [None req-9238b777-b0d4-44d3-9d4b-954d997d6046 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 791.890247] env[62627]: DEBUG nova.network.neutron [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 791.915258] env[62627]: DEBUG oslo_vmware.api [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52996503-1337-2648-72ca-9c4ddef04287, 'name': SearchDatastore_Task, 'duration_secs': 0.010878} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.915565] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 791.915836] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e/2cfcbc3b-c2b9-4155-a48a-0e73eea0589e.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 791.916118] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-87965647-cb32-4e62-b5e9-39fc2bc37702 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.923446] env[62627]: DEBUG oslo_vmware.api [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Waiting for the task: (returnval){ [ 791.923446] env[62627]: value = "task-2194123" [ 791.923446] env[62627]: _type = "Task" [ 791.923446] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.932686] env[62627]: DEBUG oslo_vmware.api [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Task: {'id': task-2194123, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.996524] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9196fa40-c789-4871-98d3-be7604e38c93 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Lock "fffa23fe-3ce2-4879-b72d-11275d298504" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 54.228s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 792.075918] env[62627]: DEBUG nova.network.neutron [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Updating instance_info_cache with network_info: [{"id": "5db92929-a96d-41c2-be2b-95c9ef62c4bf", "address": "fa:16:3e:c2:49:86", "network": {"id": "c9ffbe6a-63fb-4479-bc68-6209e534cbe0", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-213656681-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9d36f0395b094ef4aa913b538a31f6a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d33839ae-40ca-471b-92e3-eb282b920682", "external-id": "nsx-vlan-transportzone-416", "segmentation_id": 416, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5db92929-a9", "ovs_interfaceid": "5db92929-a96d-41c2-be2b-95c9ef62c4bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 792.199717] env[62627]: DEBUG nova.compute.manager [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] [instance: 16c4065b-83cf-4b1c-92bf-66ce0a75e573] Start spawning the instance on the hypervisor. 
{{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 792.202392] env[62627]: DEBUG nova.network.neutron [-] [instance: 15799515-ab7d-4e7c-a6e0-c740baf4f93a] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 792.227533] env[62627]: DEBUG nova.virt.hardware [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 792.227776] env[62627]: DEBUG nova.virt.hardware [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 792.227929] env[62627]: DEBUG nova.virt.hardware [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 792.228125] env[62627]: DEBUG nova.virt.hardware [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 792.228277] env[62627]: DEBUG nova.virt.hardware [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 792.228419] env[62627]: DEBUG nova.virt.hardware [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 792.228615] env[62627]: DEBUG nova.virt.hardware [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 792.228770] env[62627]: DEBUG nova.virt.hardware [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 
tempest-ServerShowV254Test-1641358656-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 792.228931] env[62627]: DEBUG nova.virt.hardware [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 792.229114] env[62627]: DEBUG nova.virt.hardware [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 792.229324] env[62627]: DEBUG nova.virt.hardware [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 792.230428] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73aee6b8-9fdc-4e59-a667-dcbb4c1cc25f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.238927] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed3484c4-a208-4faf-a593-dba28f45793c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.253321] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] [instance: 16c4065b-83cf-4b1c-92bf-66ce0a75e573] Instance VIF info [] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 792.259021] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Creating folder: Project (34379c88e902406887bad33b77ba501c). Parent ref: group-v447541. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 792.259110] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5ef4b53f-8fce-4227-838e-8538a38e4708 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.271036] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Created folder: Project (34379c88e902406887bad33b77ba501c) in parent group-v447541. [ 792.271138] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Creating folder: Instances. Parent ref: group-v447604. 
{{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 792.271348] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8c5c3736-5dc9-42fb-8c57-1ce9a295039a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.282089] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Created folder: Instances in parent group-v447604. [ 792.282322] env[62627]: DEBUG oslo.service.loopingcall [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 792.282571] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 16c4065b-83cf-4b1c-92bf-66ce0a75e573] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 792.282730] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fbe61003-7d9b-41ed-97ba-1afeb9c4b289 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.306044] env[62627]: DEBUG oslo_vmware.api [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194122, 'name': Rename_Task, 'duration_secs': 0.148266} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.307063] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 792.307296] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 792.307296] env[62627]: value = "task-2194126" [ 792.307296] env[62627]: _type = "Task" [ 792.307296] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.307505] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d863a8bc-6f9d-478e-9356-15e61edf0b2f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.317071] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194126, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.318381] env[62627]: DEBUG oslo_vmware.api [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the task: (returnval){ [ 792.318381] env[62627]: value = "task-2194127" [ 792.318381] env[62627]: _type = "Task" [ 792.318381] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.326575] env[62627]: DEBUG oslo_vmware.api [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194127, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.347310] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9238b777-b0d4-44d3-9d4b-954d997d6046 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.166s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 792.348938] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ac68c546-e40c-4953-9e5d-64eade1881d6 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.583s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 792.349232] env[62627]: DEBUG nova.objects.instance [None req-ac68c546-e40c-4953-9e5d-64eade1881d6 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Lazy-loading 'resources' on Instance uuid 33bf3932-3746-445b-b0fe-611466cdd9b1 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 792.377323] env[62627]: INFO nova.scheduler.client.report [None req-9238b777-b0d4-44d3-9d4b-954d997d6046 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Deleted allocations for instance e8c87637-4a30-4482-af21-f58ad8a218fc [ 792.434472] env[62627]: DEBUG oslo_vmware.api [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Task: {'id': task-2194123, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.499557] env[62627]: DEBUG nova.compute.manager [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] [instance: cbd30c37-f6c3-4229-8b95-855cab64924c] Starting instance... 
{{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 792.543419] env[62627]: DEBUG nova.network.neutron [None req-9af71126-930a-484c-899d-20b8145fd33b tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Updating instance_info_cache with network_info: [{"id": "4cb3b0a3-c2f5-4b36-a75a-b6d3b5e59dc3", "address": "fa:16:3e:e4:4d:b6", "network": {"id": "13960831-02bf-4e73-b792-efe56ca391d7", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-864734567-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "4edac85f0a824e4798b50cf27575dfdb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee20e439-fed9-490e-97dd-f3c886977ae1", "external-id": "nsx-vlan-transportzone-357", "segmentation_id": 357, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4cb3b0a3-c2", "ovs_interfaceid": "4cb3b0a3-c2f5-4b36-a75a-b6d3b5e59dc3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 792.579050] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Releasing lock "refresh_cache-faa16f10-63f0-457c-8cd7-20a624454d86" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 792.579573] env[62627]: DEBUG nova.compute.manager [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Instance network_info: |[{"id": "5db92929-a96d-41c2-be2b-95c9ef62c4bf", "address": "fa:16:3e:c2:49:86", "network": {"id": "c9ffbe6a-63fb-4479-bc68-6209e534cbe0", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-213656681-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9d36f0395b094ef4aa913b538a31f6a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d33839ae-40ca-471b-92e3-eb282b920682", "external-id": "nsx-vlan-transportzone-416", "segmentation_id": 416, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5db92929-a9", "ovs_interfaceid": "5db92929-a96d-41c2-be2b-95c9ef62c4bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 792.579925] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c2:49:86', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd33839ae-40ca-471b-92e3-eb282b920682', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5db92929-a96d-41c2-be2b-95c9ef62c4bf', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 792.587818] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Creating folder: Project (9d36f0395b094ef4aa913b538a31f6a9). Parent ref: group-v447541. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 792.588447] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-011d705a-8095-4d65-986b-fffae947eb03 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.608635] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Created folder: Project (9d36f0395b094ef4aa913b538a31f6a9) in parent group-v447541. [ 792.608952] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Creating folder: Instances. Parent ref: group-v447607. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 792.609418] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ac51e63d-69a6-4298-bcbf-c31ecb39273f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.624514] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Created folder: Instances in parent group-v447607. [ 792.624844] env[62627]: DEBUG oslo.service.loopingcall [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 792.625086] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 792.625315] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-848a2a66-839a-401e-a6b6-e5e21d012f2a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.649977] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 792.649977] env[62627]: value = "task-2194130" [ 792.649977] env[62627]: _type = "Task" [ 792.649977] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.659876] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194130, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.705833] env[62627]: INFO nova.compute.manager [-] [instance: 15799515-ab7d-4e7c-a6e0-c740baf4f93a] Took 1.39 seconds to deallocate network for instance. [ 792.821261] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194126, 'name': CreateVM_Task, 'duration_secs': 0.405708} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.824817] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 16c4065b-83cf-4b1c-92bf-66ce0a75e573] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 792.825269] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 792.825429] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 792.825829] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 792.826560] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cb49f8f4-f098-4e9c-ba98-7943e8982c1a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.831952] env[62627]: DEBUG oslo_vmware.api [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194127, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.833849] env[62627]: DEBUG oslo_vmware.api [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Waiting for the task: (returnval){ [ 792.833849] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]525978bb-3ab7-5fd2-4189-5a863d1a22e4" [ 792.833849] env[62627]: _type = "Task" [ 792.833849] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.843154] env[62627]: DEBUG oslo_vmware.api [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]525978bb-3ab7-5fd2-4189-5a863d1a22e4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.886761] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9238b777-b0d4-44d3-9d4b-954d997d6046 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Lock "e8c87637-4a30-4482-af21-f58ad8a218fc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.840s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 792.939087] env[62627]: DEBUG oslo_vmware.api [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Task: {'id': task-2194123, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.93435} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.939192] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e/2cfcbc3b-c2b9-4155-a48a-0e73eea0589e.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 792.939477] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 792.941101] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cd51d3fe-c3d1-4608-8518-f349af65bcf4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.944383] env[62627]: DEBUG nova.compute.manager [req-09f9fa18-95cd-4b58-ae57-cf1f48d05f9d req-50980d53-8ebb-49d6-9084-26b1718a7b05 service nova] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Received event network-changed-5db92929-a96d-41c2-be2b-95c9ef62c4bf {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 792.944587] env[62627]: DEBUG nova.compute.manager [req-09f9fa18-95cd-4b58-ae57-cf1f48d05f9d req-50980d53-8ebb-49d6-9084-26b1718a7b05 service nova] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Refreshing instance network info cache due to event network-changed-5db92929-a96d-41c2-be2b-95c9ef62c4bf. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 792.944818] env[62627]: DEBUG oslo_concurrency.lockutils [req-09f9fa18-95cd-4b58-ae57-cf1f48d05f9d req-50980d53-8ebb-49d6-9084-26b1718a7b05 service nova] Acquiring lock "refresh_cache-faa16f10-63f0-457c-8cd7-20a624454d86" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 792.944960] env[62627]: DEBUG oslo_concurrency.lockutils [req-09f9fa18-95cd-4b58-ae57-cf1f48d05f9d req-50980d53-8ebb-49d6-9084-26b1718a7b05 service nova] Acquired lock "refresh_cache-faa16f10-63f0-457c-8cd7-20a624454d86" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 792.945132] env[62627]: DEBUG nova.network.neutron [req-09f9fa18-95cd-4b58-ae57-cf1f48d05f9d req-50980d53-8ebb-49d6-9084-26b1718a7b05 service nova] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Refreshing network info cache for port 5db92929-a96d-41c2-be2b-95c9ef62c4bf {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 792.956636] env[62627]: DEBUG oslo_vmware.api [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Waiting for the task: (returnval){ [ 792.956636] env[62627]: value = "task-2194131" [ 792.956636] env[62627]: _type = "Task" [ 792.956636] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.971477] env[62627]: DEBUG oslo_vmware.api [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Task: {'id': task-2194131, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.023131] env[62627]: DEBUG oslo_concurrency.lockutils [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 793.048200] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9af71126-930a-484c-899d-20b8145fd33b tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Releasing lock "refresh_cache-fffa23fe-3ce2-4879-b72d-11275d298504" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 793.162224] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194130, 'name': CreateVM_Task, 'duration_secs': 0.421455} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.164924] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 793.165868] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 793.214464] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8a65011e-78d9-423f-b028-9afc812ebe7c tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 793.335198] env[62627]: DEBUG oslo_vmware.api [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194127, 'name': PowerOnVM_Task, 'duration_secs': 0.729675} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.338256] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 793.338469] env[62627]: INFO nova.compute.manager [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] Took 9.57 seconds to spawn the instance on the hypervisor. [ 793.338645] env[62627]: DEBUG nova.compute.manager [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 793.341624] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca8b2346-924e-43de-b866-ed47745f5b2a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.350724] env[62627]: DEBUG oslo_vmware.api [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]525978bb-3ab7-5fd2-4189-5a863d1a22e4, 'name': SearchDatastore_Task, 'duration_secs': 0.011479} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.352320] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 793.352770] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] [instance: 16c4065b-83cf-4b1c-92bf-66ce0a75e573] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 793.352770] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 793.352915] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 793.353115] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 793.360590] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 793.361307] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 793.361667] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-507cf83a-a081-4055-ba97-17645a3e6ff6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.364121] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-49b1f179-01c1-4241-afba-faacee19b605 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.369856] env[62627]: DEBUG oslo_vmware.api [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Waiting for the task: (returnval){ [ 793.369856] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52cc1814-623c-f24b-544f-2648c93f93fe" [ 793.369856] env[62627]: _type = "Task" [ 793.369856] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.376071] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 793.376071] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 793.381280] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d2a055f7-856f-4075-bb0a-a5d6635c9a3e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.383617] env[62627]: DEBUG oslo_vmware.api [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52cc1814-623c-f24b-544f-2648c93f93fe, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.387633] env[62627]: DEBUG oslo_vmware.api [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Waiting for the task: (returnval){ [ 793.387633] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]521aae66-531b-02ba-8f5f-3f8159215a13" [ 793.387633] env[62627]: _type = "Task" [ 793.387633] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.397135] env[62627]: DEBUG oslo_vmware.api [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]521aae66-531b-02ba-8f5f-3f8159215a13, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.425348] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b73c659-3755-4a24-a478-a7e6c4d7f99d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.433580] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ed14f72-2fe9-441f-b3c4-c3a53ba96f80 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.474018] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3de5b2dd-2f18-490a-b5c2-f0491c094a72 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.483192] env[62627]: DEBUG oslo_vmware.api [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Task: {'id': task-2194131, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.099961} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.484740] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86f27db6-b218-4b9c-8008-5fce26493a05 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.489393] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 793.490174] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cca93ea-c3fd-4277-af5a-ec96a70ec467 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.503730] env[62627]: DEBUG nova.compute.provider_tree [None req-ac68c546-e40c-4953-9e5d-64eade1881d6 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 793.528986] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Reconfiguring VM instance instance-00000017 to attach disk [datastore2] 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e/2cfcbc3b-c2b9-4155-a48a-0e73eea0589e.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 793.532894] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c24f062b-a5b5-4890-8149-81e7843cd310 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.555109] env[62627]: DEBUG oslo_vmware.api [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Waiting for the task: (returnval){ [ 793.555109] env[62627]: value = "task-2194132" [ 793.555109] env[62627]: _type = "Task" [ 793.555109] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.564560] env[62627]: DEBUG oslo_vmware.api [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Task: {'id': task-2194132, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.801714] env[62627]: DEBUG nova.network.neutron [req-09f9fa18-95cd-4b58-ae57-cf1f48d05f9d req-50980d53-8ebb-49d6-9084-26b1718a7b05 service nova] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Updated VIF entry in instance network info cache for port 5db92929-a96d-41c2-be2b-95c9ef62c4bf. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 793.802079] env[62627]: DEBUG nova.network.neutron [req-09f9fa18-95cd-4b58-ae57-cf1f48d05f9d req-50980d53-8ebb-49d6-9084-26b1718a7b05 service nova] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Updating instance_info_cache with network_info: [{"id": "5db92929-a96d-41c2-be2b-95c9ef62c4bf", "address": "fa:16:3e:c2:49:86", "network": {"id": "c9ffbe6a-63fb-4479-bc68-6209e534cbe0", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-213656681-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9d36f0395b094ef4aa913b538a31f6a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d33839ae-40ca-471b-92e3-eb282b920682", "external-id": "nsx-vlan-transportzone-416", "segmentation_id": 416, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5db92929-a9", "ovs_interfaceid": "5db92929-a96d-41c2-be2b-95c9ef62c4bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 793.878775] env[62627]: INFO nova.compute.manager [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] Took 32.28 seconds to build instance. [ 793.888640] env[62627]: DEBUG oslo_vmware.api [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52cc1814-623c-f24b-544f-2648c93f93fe, 'name': SearchDatastore_Task, 'duration_secs': 0.013209} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.892783] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 793.892783] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 793.893955] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 793.900597] env[62627]: DEBUG oslo_vmware.api [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]521aae66-531b-02ba-8f5f-3f8159215a13, 'name': SearchDatastore_Task, 'duration_secs': 0.014787} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.902168] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-949261f8-3901-42b9-a856-7f5be1fb172a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.908891] env[62627]: DEBUG oslo_vmware.api [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Waiting for the task: (returnval){ [ 793.908891] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]523888f5-2c5f-b939-4004-03069962119e" [ 793.908891] env[62627]: _type = "Task" [ 793.908891] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.918324] env[62627]: DEBUG oslo_vmware.api [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]523888f5-2c5f-b939-4004-03069962119e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.033103] env[62627]: DEBUG nova.scheduler.client.report [None req-ac68c546-e40c-4953-9e5d-64eade1881d6 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 794.071435] env[62627]: DEBUG oslo_vmware.api [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Task: {'id': task-2194132, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.306029] env[62627]: DEBUG oslo_concurrency.lockutils [req-09f9fa18-95cd-4b58-ae57-cf1f48d05f9d req-50980d53-8ebb-49d6-9084-26b1718a7b05 service nova] Releasing lock "refresh_cache-faa16f10-63f0-457c-8cd7-20a624454d86" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 794.306029] env[62627]: DEBUG nova.compute.manager [req-09f9fa18-95cd-4b58-ae57-cf1f48d05f9d req-50980d53-8ebb-49d6-9084-26b1718a7b05 service nova] [instance: 15799515-ab7d-4e7c-a6e0-c740baf4f93a] Received event network-vif-deleted-0f9d4335-0759-496e-8e45-fec3775be050 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 794.381132] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ccb2ebb8-3b3b-4648-96ff-529769dc2666 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Lock "83998771-97a7-4458-834e-df1e4b2914ee" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 56.111s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 794.420177] env[62627]: DEBUG oslo_vmware.api [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]523888f5-2c5f-b939-4004-03069962119e, 'name': SearchDatastore_Task, 'duration_secs': 0.02656} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.420468] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 794.420727] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore1] 16c4065b-83cf-4b1c-92bf-66ce0a75e573/16c4065b-83cf-4b1c-92bf-66ce0a75e573.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 794.421016] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 794.421209] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 794.421446] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-56d0c15c-2e5a-4e4d-8557-56129b3c9a04 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.423644] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1569bfb9-6e59-4adc-9bb7-1ea3fab3b39e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.431584] env[62627]: DEBUG oslo_vmware.api [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Waiting for the task: (returnval){ [ 794.431584] env[62627]: value = "task-2194133" [ 794.431584] env[62627]: _type = "Task" [ 794.431584] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.437215] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 794.437412] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 794.440774] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-81793375-0d3b-4303-97a5-a18a7fda23d1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.442972] env[62627]: DEBUG oslo_vmware.api [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Task: {'id': task-2194133, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.446846] env[62627]: DEBUG oslo_vmware.api [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Waiting for the task: (returnval){ [ 794.446846] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52ea7c82-0a6c-0738-e1c8-62558331f4c7" [ 794.446846] env[62627]: _type = "Task" [ 794.446846] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.456165] env[62627]: DEBUG oslo_vmware.api [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52ea7c82-0a6c-0738-e1c8-62558331f4c7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.538555] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ac68c546-e40c-4953-9e5d-64eade1881d6 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.189s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 794.540835] env[62627]: DEBUG oslo_concurrency.lockutils [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.360s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 794.542449] env[62627]: INFO nova.compute.claims [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 794.561908] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c91fbc73-8b18-4f5a-b593-4431e461194f tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Acquiring lock "83998771-97a7-4458-834e-df1e4b2914ee" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 794.562159] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c91fbc73-8b18-4f5a-b593-4431e461194f tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Lock 
"83998771-97a7-4458-834e-df1e4b2914ee" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 794.562662] env[62627]: DEBUG nova.compute.manager [None req-c91fbc73-8b18-4f5a-b593-4431e461194f tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 794.563747] env[62627]: INFO nova.scheduler.client.report [None req-ac68c546-e40c-4953-9e5d-64eade1881d6 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Deleted allocations for instance 33bf3932-3746-445b-b0fe-611466cdd9b1 [ 794.565306] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe236d0a-af28-476d-bd78-de6546139129 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.574563] env[62627]: DEBUG oslo_vmware.api [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Task: {'id': task-2194132, 'name': ReconfigVM_Task, 'duration_secs': 0.560344} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.576402] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Reconfigured VM instance instance-00000017 to attach disk [datastore2] 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e/2cfcbc3b-c2b9-4155-a48a-0e73eea0589e.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 794.577034] env[62627]: DEBUG nova.compute.manager [None req-c91fbc73-8b18-4f5a-b593-4431e461194f tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62627) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 794.577601] env[62627]: DEBUG nova.objects.instance [None req-c91fbc73-8b18-4f5a-b593-4431e461194f tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Lazy-loading 'flavor' on Instance uuid 83998771-97a7-4458-834e-df1e4b2914ee {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 794.578786] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9bfe155f-693a-4e32-87ea-330db4ffd763 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.586677] env[62627]: DEBUG oslo_vmware.api [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Waiting for the task: (returnval){ [ 794.586677] env[62627]: value = "task-2194134" [ 794.586677] env[62627]: _type = "Task" [ 794.586677] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.591157] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-9af71126-930a-484c-899d-20b8145fd33b tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 794.591788] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-eb29f0ad-67b5-4413-8f28-f71b7ced816b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.596993] env[62627]: DEBUG oslo_vmware.api [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Task: {'id': task-2194134, 'name': Rename_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.600149] env[62627]: DEBUG oslo_vmware.api [None req-9af71126-930a-484c-899d-20b8145fd33b tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Waiting for the task: (returnval){ [ 794.600149] env[62627]: value = "task-2194135" [ 794.600149] env[62627]: _type = "Task" [ 794.600149] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.611510] env[62627]: DEBUG oslo_vmware.api [None req-9af71126-930a-484c-899d-20b8145fd33b tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Task: {'id': task-2194135, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.884990] env[62627]: DEBUG nova.compute.manager [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 794.944300] env[62627]: DEBUG oslo_vmware.api [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Task: {'id': task-2194133, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.959831] env[62627]: DEBUG oslo_vmware.api [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52ea7c82-0a6c-0738-e1c8-62558331f4c7, 'name': SearchDatastore_Task, 'duration_secs': 0.041658} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.960658] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d4f1377-8e1c-4d3b-8b85-0a7d55622e0a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.967569] env[62627]: DEBUG oslo_vmware.api [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Waiting for the task: (returnval){ [ 794.967569] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5220d5d8-18a1-867c-0b1a-2f83f6d65bde" [ 794.967569] env[62627]: _type = "Task" [ 794.967569] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.977943] env[62627]: DEBUG oslo_vmware.api [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5220d5d8-18a1-867c-0b1a-2f83f6d65bde, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.079670] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ac68c546-e40c-4953-9e5d-64eade1881d6 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Lock "33bf3932-3746-445b-b0fe-611466cdd9b1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.271s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 795.098722] env[62627]: DEBUG oslo_vmware.api [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Task: {'id': task-2194134, 'name': Rename_Task, 'duration_secs': 0.162556} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.098992] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 795.099237] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-70a962af-3328-493d-8ba5-e79437f3b7c1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.109074] env[62627]: DEBUG oslo_vmware.api [None req-9af71126-930a-484c-899d-20b8145fd33b tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Task: {'id': task-2194135, 'name': PowerOffVM_Task, 'duration_secs': 0.19598} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.109335] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-9af71126-930a-484c-899d-20b8145fd33b tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 795.110231] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5564e9d7-c676-41aa-b884-de0a38c278c3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.115015] env[62627]: DEBUG oslo_vmware.api [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Waiting for the task: (returnval){ [ 795.115015] env[62627]: value = "task-2194136" [ 795.115015] env[62627]: _type = "Task" [ 795.115015] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.133880] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3a74b53-8d98-4e81-8409-5d5223c2c190 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.139868] env[62627]: DEBUG oslo_vmware.api [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Task: {'id': task-2194136, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.169957] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-9af71126-930a-484c-899d-20b8145fd33b tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 795.170259] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0d047c2e-7ee3-4237-8a11-f04fb31887c2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.178754] env[62627]: DEBUG oslo_vmware.api [None req-9af71126-930a-484c-899d-20b8145fd33b tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Waiting for the task: (returnval){ [ 795.178754] env[62627]: value = "task-2194137" [ 795.178754] env[62627]: _type = "Task" [ 795.178754] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.186848] env[62627]: DEBUG oslo_vmware.api [None req-9af71126-930a-484c-899d-20b8145fd33b tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Task: {'id': task-2194137, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.411469] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 795.444101] env[62627]: DEBUG oslo_vmware.api [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Task: {'id': task-2194133, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.6855} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.444407] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore1] 16c4065b-83cf-4b1c-92bf-66ce0a75e573/16c4065b-83cf-4b1c-92bf-66ce0a75e573.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 795.444637] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] [instance: 16c4065b-83cf-4b1c-92bf-66ce0a75e573] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 795.444933] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3a209493-8d53-4f0f-be00-306639db8c27 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.453244] env[62627]: DEBUG oslo_vmware.api [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Waiting for the task: (returnval){ [ 795.453244] env[62627]: value = "task-2194138" [ 795.453244] env[62627]: _type = "Task" [ 795.453244] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.463152] env[62627]: DEBUG oslo_vmware.api [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Task: {'id': task-2194138, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.478744] env[62627]: DEBUG oslo_vmware.api [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5220d5d8-18a1-867c-0b1a-2f83f6d65bde, 'name': SearchDatastore_Task, 'duration_secs': 0.057753} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.479074] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 795.479359] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore1] faa16f10-63f0-457c-8cd7-20a624454d86/faa16f10-63f0-457c-8cd7-20a624454d86.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 795.479628] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dbd63231-c1c8-4644-b55c-a72c43b58cc1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.487565] env[62627]: DEBUG oslo_vmware.api [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Waiting for the task: (returnval){ [ 795.487565] env[62627]: value = "task-2194139" [ 795.487565] env[62627]: _type = "Task" [ 795.487565] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.497217] env[62627]: DEBUG oslo_vmware.api [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Task: {'id': task-2194139, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.585709] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-c91fbc73-8b18-4f5a-b593-4431e461194f tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 795.586042] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-89ad3cc8-a6d1-4cda-8169-1fe118e0387b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.596967] env[62627]: DEBUG oslo_vmware.api [None req-c91fbc73-8b18-4f5a-b593-4431e461194f tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the task: (returnval){ [ 795.596967] env[62627]: value = "task-2194140" [ 795.596967] env[62627]: _type = "Task" [ 795.596967] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.607141] env[62627]: DEBUG oslo_vmware.api [None req-c91fbc73-8b18-4f5a-b593-4431e461194f tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194140, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.627443] env[62627]: DEBUG oslo_vmware.api [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Task: {'id': task-2194136, 'name': PowerOnVM_Task, 'duration_secs': 0.467024} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.628065] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 795.628065] env[62627]: INFO nova.compute.manager [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Took 9.05 seconds to spawn the instance on the hypervisor. [ 795.628223] env[62627]: DEBUG nova.compute.manager [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 795.629096] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25b480cb-0373-4cc1-8ca4-33f690d46fad {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.642154] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 795.642844] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 795.700444] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-9af71126-930a-484c-899d-20b8145fd33b tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] VM already powered off {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 795.700828] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-9af71126-930a-484c-899d-20b8145fd33b tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 795.701222] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9af71126-930a-484c-899d-20b8145fd33b tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Acquiring lock "[datastore2] 
devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 795.701494] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9af71126-930a-484c-899d-20b8145fd33b tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 795.701853] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-9af71126-930a-484c-899d-20b8145fd33b tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 795.706296] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e9c20610-578b-43c4-a214-7234f3ae0496 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.718097] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-9af71126-930a-484c-899d-20b8145fd33b tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 795.718329] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-9af71126-930a-484c-899d-20b8145fd33b tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 795.719201] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5f466098-3946-4915-adf9-c75d4eacf9b2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.730153] env[62627]: DEBUG oslo_vmware.api [None req-9af71126-930a-484c-899d-20b8145fd33b tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Waiting for the task: (returnval){ [ 795.730153] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5258e1c4-601a-2191-00f1-2ec23d015992" [ 795.730153] env[62627]: _type = "Task" [ 795.730153] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.743618] env[62627]: DEBUG oslo_vmware.api [None req-9af71126-930a-484c-899d-20b8145fd33b tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5258e1c4-601a-2191-00f1-2ec23d015992, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.968386] env[62627]: DEBUG oslo_vmware.api [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Task: {'id': task-2194138, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.112161} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.969576] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] [instance: 16c4065b-83cf-4b1c-92bf-66ce0a75e573] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 795.969576] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-358abf1d-3fb1-41c7-9dcd-3b5cbb3aca9a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.994310] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] [instance: 16c4065b-83cf-4b1c-92bf-66ce0a75e573] Reconfiguring VM instance instance-00000019 to attach disk [datastore1] 16c4065b-83cf-4b1c-92bf-66ce0a75e573/16c4065b-83cf-4b1c-92bf-66ce0a75e573.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 795.997992] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-63555ee6-e7be-4214-8019-2b06f344ea64 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.024152] env[62627]: DEBUG oslo_vmware.api [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Task: {'id': task-2194139, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.027956] env[62627]: DEBUG oslo_vmware.api [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Waiting for the task: (returnval){ [ 796.027956] env[62627]: value = "task-2194141" [ 796.027956] env[62627]: _type = "Task" [ 796.027956] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.037420] env[62627]: DEBUG oslo_vmware.api [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Task: {'id': task-2194141, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.110183] env[62627]: DEBUG oslo_vmware.api [None req-c91fbc73-8b18-4f5a-b593-4431e461194f tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194140, 'name': PowerOffVM_Task, 'duration_secs': 0.236609} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.110450] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-c91fbc73-8b18-4f5a-b593-4431e461194f tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 796.110638] env[62627]: DEBUG nova.compute.manager [None req-c91fbc73-8b18-4f5a-b593-4431e461194f tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 796.111449] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ceb0a0ae-24c1-4f5e-994a-459c40184369 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.161121] env[62627]: INFO nova.compute.manager [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Took 32.56 seconds to build instance. [ 796.162430] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 796.162430] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Starting heal instance info cache {{(pid=62627) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 796.162430] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Rebuilding the list of instances to heal {{(pid=62627) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10449}} [ 796.174987] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e031a2c6-b106-41d3-aff3-e8e025799822 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.184624] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61987257-8e86-4f9e-a5c0-ac31567dccae {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.218557] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b949188-5926-4b34-a12a-8539123847e7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.227362] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2102af73-8865-4edf-9f51-96b53fc35555 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.248046] env[62627]: DEBUG nova.compute.provider_tree [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 796.252530] env[62627]: DEBUG oslo_vmware.api [None req-9af71126-930a-484c-899d-20b8145fd33b tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5258e1c4-601a-2191-00f1-2ec23d015992, 'name': SearchDatastore_Task, 'duration_secs': 0.022414} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.253518] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e4e56209-9539-4971-bed9-0183a05af784 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.259671] env[62627]: DEBUG oslo_vmware.api [None req-9af71126-930a-484c-899d-20b8145fd33b tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Waiting for the task: (returnval){ [ 796.259671] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]524e85c7-ba21-66b3-cda9-ffc8c39cc0df" [ 796.259671] env[62627]: _type = "Task" [ 796.259671] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.268791] env[62627]: DEBUG oslo_vmware.api [None req-9af71126-930a-484c-899d-20b8145fd33b tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]524e85c7-ba21-66b3-cda9-ffc8c39cc0df, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.509586] env[62627]: DEBUG oslo_vmware.api [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Task: {'id': task-2194139, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.561462} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.509909] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore1] faa16f10-63f0-457c-8cd7-20a624454d86/faa16f10-63f0-457c-8cd7-20a624454d86.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 796.510157] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 796.510421] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fa450654-097a-4a42-9e06-191bc299c4b7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.519186] env[62627]: DEBUG oslo_vmware.api [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Waiting for the task: (returnval){ [ 796.519186] env[62627]: value = "task-2194142" [ 796.519186] env[62627]: _type = "Task" [ 796.519186] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.528387] env[62627]: DEBUG oslo_vmware.api [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Task: {'id': task-2194142, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.538918] env[62627]: DEBUG oslo_vmware.api [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Task: {'id': task-2194141, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.625902] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c91fbc73-8b18-4f5a-b593-4431e461194f tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Lock "83998771-97a7-4458-834e-df1e4b2914ee" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.063s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 796.665101] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8febe364-149c-4fbd-9ea6-d846a49845ef tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Lock "2cfcbc3b-c2b9-4155-a48a-0e73eea0589e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 57.192s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 796.669413] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Skipping network cache update for instance because it is Building. {{(pid=62627) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10458}} [ 796.669669] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 16c4065b-83cf-4b1c-92bf-66ce0a75e573] Skipping network cache update for instance because it is Building. {{(pid=62627) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10458}} [ 796.669728] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Skipping network cache update for instance because it is Building. 
{{(pid=62627) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10458}} [ 796.704341] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Acquiring lock "refresh_cache-77418dff-6bdc-4e0a-954a-45c80960dbf1" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 796.704535] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Acquired lock "refresh_cache-77418dff-6bdc-4e0a-954a-45c80960dbf1" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 796.704688] env[62627]: DEBUG nova.network.neutron [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 77418dff-6bdc-4e0a-954a-45c80960dbf1] Forcefully refreshing network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 796.704847] env[62627]: DEBUG nova.objects.instance [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lazy-loading 'info_cache' on Instance uuid 77418dff-6bdc-4e0a-954a-45c80960dbf1 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 796.742156] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Acquiring lock "02216be1-cdb9-421a-8a23-4a919b376595" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 796.742156] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Lock "02216be1-cdb9-421a-8a23-4a919b376595" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 796.753992] env[62627]: DEBUG nova.scheduler.client.report [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 796.767959] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Acquiring lock "f1b70379-e13d-4007-ac2d-3d2d43cd5602" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 796.768210] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 
tempest-MultipleCreateTestJSON-1897705394-project-member] Lock "f1b70379-e13d-4007-ac2d-3d2d43cd5602" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 796.773751] env[62627]: DEBUG oslo_vmware.api [None req-9af71126-930a-484c-899d-20b8145fd33b tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]524e85c7-ba21-66b3-cda9-ffc8c39cc0df, 'name': SearchDatastore_Task, 'duration_secs': 0.042693} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.774554] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9af71126-930a-484c-899d-20b8145fd33b tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 796.774809] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-9af71126-930a-484c-899d-20b8145fd33b tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] fffa23fe-3ce2-4879-b72d-11275d298504/d5497cf1-16e8-478a-a834-e9289b1751b7-rescue.vmdk. {{(pid=62627) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 796.775075] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-25df140e-97a6-4aac-97ad-cd11880b0f61 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.783075] env[62627]: DEBUG oslo_vmware.api [None req-9af71126-930a-484c-899d-20b8145fd33b tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Waiting for the task: (returnval){ [ 796.783075] env[62627]: value = "task-2194143" [ 796.783075] env[62627]: _type = "Task" [ 796.783075] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.792522] env[62627]: DEBUG oslo_vmware.api [None req-9af71126-930a-484c-899d-20b8145fd33b tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Task: {'id': task-2194143, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.037961] env[62627]: DEBUG oslo_vmware.api [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Task: {'id': task-2194142, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070464} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.042235] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 797.046457] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a0997aa-8d21-4b24-bd6c-ed15265fdb22 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.049962] env[62627]: DEBUG oslo_vmware.api [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Task: {'id': task-2194141, 'name': ReconfigVM_Task, 'duration_secs': 0.528721} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.050282] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] [instance: 16c4065b-83cf-4b1c-92bf-66ce0a75e573] Reconfigured VM instance instance-00000019 to attach disk [datastore1] 16c4065b-83cf-4b1c-92bf-66ce0a75e573/16c4065b-83cf-4b1c-92bf-66ce0a75e573.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 797.052665] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-74b4cfb1-7fc6-4864-ae06-e144dad02cb3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.074466] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Reconfiguring VM instance instance-00000018 to attach disk [datastore1] faa16f10-63f0-457c-8cd7-20a624454d86/faa16f10-63f0-457c-8cd7-20a624454d86.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 797.076405] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8b841e45-de38-4ffd-bb5f-dbbfe88e1586 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.097949] env[62627]: DEBUG oslo_vmware.api [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Waiting for the task: (returnval){ [ 797.097949] env[62627]: value = "task-2194144" [ 797.097949] env[62627]: _type = "Task" [ 797.097949] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.106095] env[62627]: DEBUG oslo_vmware.api [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Waiting for the task: (returnval){ [ 797.106095] env[62627]: value = "task-2194145" [ 797.106095] env[62627]: _type = "Task" [ 797.106095] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.112513] env[62627]: DEBUG oslo_vmware.api [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Task: {'id': task-2194144, 'name': Rename_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.121039] env[62627]: DEBUG oslo_vmware.api [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Task: {'id': task-2194145, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.174642] env[62627]: DEBUG nova.compute.manager [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: aa181094-cb78-44e0-8d7a-1d677e0af1fe] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 797.259529] env[62627]: DEBUG oslo_concurrency.lockutils [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.719s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 797.260093] env[62627]: DEBUG nova.compute.manager [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Start building networks asynchronously for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 797.263790] env[62627]: DEBUG oslo_concurrency.lockutils [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 23.959s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 797.264147] env[62627]: DEBUG nova.objects.instance [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62627) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 797.296476] env[62627]: DEBUG oslo_vmware.api [None req-9af71126-930a-484c-899d-20b8145fd33b tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Task: {'id': task-2194143, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.46975} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.296476] env[62627]: INFO nova.virt.vmwareapi.ds_util [None req-9af71126-930a-484c-899d-20b8145fd33b tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] fffa23fe-3ce2-4879-b72d-11275d298504/d5497cf1-16e8-478a-a834-e9289b1751b7-rescue.vmdk. [ 797.296476] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f0f5121-5d35-4197-a160-430a3f55bbaa {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.322909] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-9af71126-930a-484c-899d-20b8145fd33b tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Reconfiguring VM instance instance-00000015 to attach disk [datastore2] fffa23fe-3ce2-4879-b72d-11275d298504/d5497cf1-16e8-478a-a834-e9289b1751b7-rescue.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 797.324568] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4ce3f0fb-a990-4e55-aa2b-5887c4ae108b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.345638] env[62627]: DEBUG oslo_vmware.api [None req-9af71126-930a-484c-899d-20b8145fd33b tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Waiting for the task: (returnval){ [ 797.345638] env[62627]: value = "task-2194146" [ 797.345638] env[62627]: _type = "Task" [ 797.345638] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.354173] env[62627]: DEBUG oslo_vmware.api [None req-9af71126-930a-484c-899d-20b8145fd33b tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Task: {'id': task-2194146, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.596765] env[62627]: DEBUG nova.compute.manager [req-3862c590-46f6-471e-9406-2f3680b03d48 req-fab497bb-5c0f-49b4-99b6-2211f36cb176 service nova] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Received event network-changed-246fd9bd-81de-4162-b744-58c17169a80f {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 797.597044] env[62627]: DEBUG nova.compute.manager [req-3862c590-46f6-471e-9406-2f3680b03d48 req-fab497bb-5c0f-49b4-99b6-2211f36cb176 service nova] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Refreshing instance network info cache due to event network-changed-246fd9bd-81de-4162-b744-58c17169a80f. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 797.597112] env[62627]: DEBUG oslo_concurrency.lockutils [req-3862c590-46f6-471e-9406-2f3680b03d48 req-fab497bb-5c0f-49b4-99b6-2211f36cb176 service nova] Acquiring lock "refresh_cache-a674db15-ea4e-44c8-abc1-6af425286517" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 797.597308] env[62627]: DEBUG oslo_concurrency.lockutils [req-3862c590-46f6-471e-9406-2f3680b03d48 req-fab497bb-5c0f-49b4-99b6-2211f36cb176 service nova] Acquired lock "refresh_cache-a674db15-ea4e-44c8-abc1-6af425286517" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 797.597480] env[62627]: DEBUG nova.network.neutron [req-3862c590-46f6-471e-9406-2f3680b03d48 req-fab497bb-5c0f-49b4-99b6-2211f36cb176 service nova] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Refreshing network info cache for port 246fd9bd-81de-4162-b744-58c17169a80f {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 797.619747] env[62627]: DEBUG oslo_vmware.api [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Task: {'id': task-2194144, 'name': Rename_Task, 'duration_secs': 0.180088} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.619981] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] [instance: 16c4065b-83cf-4b1c-92bf-66ce0a75e573] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 797.622433] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9977a0cd-f4ce-4342-92bd-95bdbefa76b1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.625485] env[62627]: DEBUG oslo_vmware.api [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Task: {'id': task-2194145, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.631881] env[62627]: DEBUG oslo_vmware.api [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Waiting for the task: (returnval){ [ 797.631881] env[62627]: value = "task-2194147" [ 797.631881] env[62627]: _type = "Task" [ 797.631881] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.641692] env[62627]: DEBUG oslo_vmware.api [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Task: {'id': task-2194147, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.700241] env[62627]: DEBUG oslo_concurrency.lockutils [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 797.769506] env[62627]: DEBUG nova.compute.utils [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 797.774150] env[62627]: DEBUG nova.compute.manager [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Allocating IP information in the background. {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 797.774359] env[62627]: DEBUG nova.network.neutron [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 797.816854] env[62627]: DEBUG nova.policy [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3c5e5eee141e496db2d2ea316c43e6be', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8e4f7ad3ebcf4c4d97bdbce58eec5a09', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 797.855546] env[62627]: DEBUG oslo_vmware.api [None req-9af71126-930a-484c-899d-20b8145fd33b tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Task: {'id': task-2194146, 'name': ReconfigVM_Task, 'duration_secs': 0.284393} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.858058] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-9af71126-930a-484c-899d-20b8145fd33b tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Reconfigured VM instance instance-00000015 to attach disk [datastore2] fffa23fe-3ce2-4879-b72d-11275d298504/d5497cf1-16e8-478a-a834-e9289b1751b7-rescue.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 797.858910] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22266883-4f11-42a9-b505-9d81fcbaea9f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.887917] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5d284395-04af-4934-9005-12cbd95de1c9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.906282] env[62627]: DEBUG oslo_vmware.api [None req-9af71126-930a-484c-899d-20b8145fd33b tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Waiting for the task: (returnval){ [ 797.906282] env[62627]: value = "task-2194148" [ 797.906282] env[62627]: _type = "Task" [ 797.906282] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.917174] env[62627]: DEBUG oslo_vmware.api [None req-9af71126-930a-484c-899d-20b8145fd33b tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Task: {'id': task-2194148, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.112222] env[62627]: DEBUG nova.compute.manager [None req-e36f63a7-11e0-4439-b16e-1d702ec48e64 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 798.112222] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2c43dcb-eaf7-4e76-aa55-0876cd3fac51 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.125121] env[62627]: DEBUG oslo_vmware.api [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Task: {'id': task-2194145, 'name': ReconfigVM_Task, 'duration_secs': 0.842394} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.127656] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Reconfigured VM instance instance-00000018 to attach disk [datastore1] faa16f10-63f0-457c-8cd7-20a624454d86/faa16f10-63f0-457c-8cd7-20a624454d86.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 798.130957] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-58096acd-e9f7-44e8-a7d3-94559bb3b971 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.143929] env[62627]: DEBUG oslo_vmware.api [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Task: {'id': task-2194147, 'name': PowerOnVM_Task, 'duration_secs': 0.451481} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.151990] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] [instance: 16c4065b-83cf-4b1c-92bf-66ce0a75e573] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 798.151990] env[62627]: INFO nova.compute.manager [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] [instance: 16c4065b-83cf-4b1c-92bf-66ce0a75e573] Took 5.95 seconds to spawn the instance on the hypervisor. [ 798.151990] env[62627]: DEBUG nova.compute.manager [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] [instance: 16c4065b-83cf-4b1c-92bf-66ce0a75e573] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 798.151990] env[62627]: DEBUG oslo_vmware.api [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Waiting for the task: (returnval){ [ 798.151990] env[62627]: value = "task-2194149" [ 798.151990] env[62627]: _type = "Task" [ 798.151990] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.151990] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e54a405-b22d-4357-8379-feac56f4b065 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.176547] env[62627]: DEBUG oslo_vmware.api [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Task: {'id': task-2194149, 'name': Rename_Task} progress is 14%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.278022] env[62627]: DEBUG nova.compute.manager [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 798.279599] env[62627]: DEBUG oslo_concurrency.lockutils [None req-792fd9ba-1fe4-4d29-a734-54f42a461995 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.016s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 798.280998] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e33b7f68-a065-4361-afff-ba33ca167387 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.224s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 798.281447] env[62627]: DEBUG nova.objects.instance [None req-e33b7f68-a065-4361-afff-ba33ca167387 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Lazy-loading 'resources' on Instance uuid f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 798.415800] env[62627]: DEBUG oslo_vmware.api [None req-9af71126-930a-484c-899d-20b8145fd33b tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Task: {'id': task-2194148, 'name': ReconfigVM_Task, 'duration_secs': 0.170531} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.418114] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-9af71126-930a-484c-899d-20b8145fd33b tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 798.418114] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3ee40991-b658-4815-a1e6-f19328c686b3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.426016] env[62627]: DEBUG oslo_vmware.api [None req-9af71126-930a-484c-899d-20b8145fd33b tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Waiting for the task: (returnval){ [ 798.426016] env[62627]: value = "task-2194150" [ 798.426016] env[62627]: _type = "Task" [ 798.426016] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.433991] env[62627]: DEBUG oslo_vmware.api [None req-9af71126-930a-484c-899d-20b8145fd33b tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Task: {'id': task-2194150, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.538817] env[62627]: DEBUG nova.network.neutron [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Successfully created port: 21d30d49-10fb-462d-9b5f-7525c3497178 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 798.636217] env[62627]: INFO nova.compute.manager [None req-e36f63a7-11e0-4439-b16e-1d702ec48e64 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] instance snapshotting [ 798.636217] env[62627]: WARNING nova.compute.manager [None req-e36f63a7-11e0-4439-b16e-1d702ec48e64 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] trying to snapshot a non-running instance: (state: 4 expected: 1) [ 798.637758] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2839a59-d885-4432-9009-0eeb12747949 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.663748] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90b4e3db-048e-43e0-ab02-4451c13ad2bd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.676926] env[62627]: DEBUG oslo_vmware.api [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Task: {'id': task-2194149, 'name': Rename_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.687484] env[62627]: INFO nova.compute.manager [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] [instance: 16c4065b-83cf-4b1c-92bf-66ce0a75e573] Took 32.28 seconds to build instance. [ 798.935360] env[62627]: DEBUG oslo_vmware.api [None req-9af71126-930a-484c-899d-20b8145fd33b tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Task: {'id': task-2194150, 'name': PowerOnVM_Task, 'duration_secs': 0.425999} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.937954] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-9af71126-930a-484c-899d-20b8145fd33b tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 798.941360] env[62627]: DEBUG nova.compute.manager [None req-9af71126-930a-484c-899d-20b8145fd33b tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 798.944680] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7778e5f-b7b3-42bc-afa5-4828e0c98f6b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.034321] env[62627]: DEBUG nova.network.neutron [req-3862c590-46f6-471e-9406-2f3680b03d48 req-fab497bb-5c0f-49b4-99b6-2211f36cb176 service nova] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Updated VIF entry in instance network info cache for port 246fd9bd-81de-4162-b744-58c17169a80f. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 799.034668] env[62627]: DEBUG nova.network.neutron [req-3862c590-46f6-471e-9406-2f3680b03d48 req-fab497bb-5c0f-49b4-99b6-2211f36cb176 service nova] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Updating instance_info_cache with network_info: [{"id": "246fd9bd-81de-4162-b744-58c17169a80f", "address": "fa:16:3e:9e:4a:5e", "network": {"id": "5d65934a-2966-4094-bc55-817726caaea5", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1246247596-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.138", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b12d6c9026644c9cb45d687b92161121", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap246fd9bd-81", "ovs_interfaceid": "246fd9bd-81de-4162-b744-58c17169a80f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 799.043277] env[62627]: DEBUG nova.network.neutron [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 77418dff-6bdc-4e0a-954a-45c80960dbf1] Updating instance_info_cache with network_info: [{"id": "79ffbb02-e48b-42f6-a1c3-8f9a38f02aa7", "address": "fa:16:3e:c1:30:1f", "network": {"id": "bd876f46-2bdf-4196-b718-c36f7e15f150", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": 
"192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.172", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ccb0cafa1a904d578ec4998b386bebf7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae1d6a8-cbba-4bbf-af10-ba5467340475", "external-id": "nsx-vlan-transportzone-271", "segmentation_id": 271, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap79ffbb02-e4", "ovs_interfaceid": "79ffbb02-e48b-42f6-a1c3-8f9a38f02aa7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 799.174799] env[62627]: DEBUG oslo_vmware.api [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Task: {'id': task-2194149, 'name': Rename_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.178928] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e36f63a7-11e0-4439-b16e-1d702ec48e64 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] Creating Snapshot of the VM instance {{(pid=62627) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 799.179243] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-9b099c99-d9fb-4815-87a4-013e763d1750 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.188840] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0b71d914-9146-4f90-8375-6347ae6beffd tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Lock "16c4065b-83cf-4b1c-92bf-66ce0a75e573" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.625s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 799.191794] env[62627]: DEBUG oslo_vmware.api [None req-e36f63a7-11e0-4439-b16e-1d702ec48e64 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the task: (returnval){ [ 799.191794] env[62627]: value = "task-2194151" [ 799.191794] env[62627]: _type = "Task" [ 799.191794] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.206091] env[62627]: DEBUG oslo_vmware.api [None req-e36f63a7-11e0-4439-b16e-1d702ec48e64 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194151, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.292299] env[62627]: DEBUG nova.compute.manager [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Start spawning the instance on the hypervisor. 
{{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 799.329952] env[62627]: DEBUG nova.virt.hardware [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 799.329952] env[62627]: DEBUG nova.virt.hardware [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 799.329952] env[62627]: DEBUG nova.virt.hardware [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 799.330137] env[62627]: DEBUG nova.virt.hardware [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 799.330137] env[62627]: DEBUG nova.virt.hardware [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 799.330137] env[62627]: DEBUG nova.virt.hardware [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 799.330137] env[62627]: DEBUG nova.virt.hardware [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 799.330137] env[62627]: DEBUG nova.virt.hardware [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 799.330254] env[62627]: DEBUG 
nova.virt.hardware [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 799.330254] env[62627]: DEBUG nova.virt.hardware [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 799.330254] env[62627]: DEBUG nova.virt.hardware [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 799.332157] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-137bbb2b-08de-4eca-aa84-3980a7aa21c1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.343358] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b46898fe-19c8-4e2f-8629-17482e7cd862 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.394804] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3123547a-bd7d-490a-b88d-ad53b5070fcd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.402984] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9e60f4c-5e4e-4154-8017-2e0953e77b5c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.435690] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66b0f239-199a-437f-b193-6ab50d1cb1b3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.445309] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ac0e97f-30cc-4330-af68-2714632f0535 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.459951] env[62627]: DEBUG nova.compute.provider_tree [None req-e33b7f68-a065-4361-afff-ba33ca167387 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 799.540056] env[62627]: DEBUG oslo_concurrency.lockutils [req-3862c590-46f6-471e-9406-2f3680b03d48 req-fab497bb-5c0f-49b4-99b6-2211f36cb176 service nova] Releasing lock "refresh_cache-a674db15-ea4e-44c8-abc1-6af425286517" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 799.546584] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Releasing lock "refresh_cache-77418dff-6bdc-4e0a-954a-45c80960dbf1" {{(pid=62627) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 799.546784] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 77418dff-6bdc-4e0a-954a-45c80960dbf1] Updated the network info_cache for instance {{(pid=62627) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10516}} [ 799.546985] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 799.548013] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 799.548013] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 799.548013] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 799.548013] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 799.548013] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 799.548254] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62627) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 799.548321] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 799.670901] env[62627]: DEBUG oslo_vmware.api [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Task: {'id': task-2194149, 'name': Rename_Task, 'duration_secs': 1.116968} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.673476] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 799.673476] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2f15ffec-9ba6-47b3-8458-7a2fb7f9a3c3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.682595] env[62627]: DEBUG oslo_vmware.api [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Waiting for the task: (returnval){ [ 799.682595] env[62627]: value = "task-2194152" [ 799.682595] env[62627]: _type = "Task" [ 799.682595] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.690518] env[62627]: DEBUG oslo_vmware.api [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Task: {'id': task-2194152, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.696535] env[62627]: DEBUG nova.compute.manager [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 799.704409] env[62627]: DEBUG oslo_vmware.api [None req-e36f63a7-11e0-4439-b16e-1d702ec48e64 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194151, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.963601] env[62627]: DEBUG nova.scheduler.client.report [None req-e33b7f68-a065-4361-afff-ba33ca167387 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 800.051820] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 800.162118] env[62627]: DEBUG nova.compute.manager [req-6d81a2b5-c20a-4abc-b4d8-9697fcffecfe req-1fca5923-a45b-4298-8c38-ba7b83474bc5 service nova] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Received event network-changed-246fd9bd-81de-4162-b744-58c17169a80f {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 800.162238] env[62627]: DEBUG nova.compute.manager [req-6d81a2b5-c20a-4abc-b4d8-9697fcffecfe req-1fca5923-a45b-4298-8c38-ba7b83474bc5 service nova] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Refreshing instance network info cache due to event network-changed-246fd9bd-81de-4162-b744-58c17169a80f. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 800.162485] env[62627]: DEBUG oslo_concurrency.lockutils [req-6d81a2b5-c20a-4abc-b4d8-9697fcffecfe req-1fca5923-a45b-4298-8c38-ba7b83474bc5 service nova] Acquiring lock "refresh_cache-a674db15-ea4e-44c8-abc1-6af425286517" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 800.162659] env[62627]: DEBUG oslo_concurrency.lockutils [req-6d81a2b5-c20a-4abc-b4d8-9697fcffecfe req-1fca5923-a45b-4298-8c38-ba7b83474bc5 service nova] Acquired lock "refresh_cache-a674db15-ea4e-44c8-abc1-6af425286517" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 800.162833] env[62627]: DEBUG nova.network.neutron [req-6d81a2b5-c20a-4abc-b4d8-9697fcffecfe req-1fca5923-a45b-4298-8c38-ba7b83474bc5 service nova] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Refreshing network info cache for port 246fd9bd-81de-4162-b744-58c17169a80f {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 800.193843] env[62627]: DEBUG oslo_vmware.api [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Task: {'id': task-2194152, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.203244] env[62627]: DEBUG oslo_vmware.api [None req-e36f63a7-11e0-4439-b16e-1d702ec48e64 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194151, 'name': CreateSnapshot_Task, 'duration_secs': 0.817728} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.203830] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e36f63a7-11e0-4439-b16e-1d702ec48e64 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] Created Snapshot of the VM instance {{(pid=62627) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 800.204344] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e2d7850-da61-42ec-af04-c04e641c4a1d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.226946] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 800.468730] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e33b7f68-a065-4361-afff-ba33ca167387 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.188s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 800.471456] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.327s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 800.472915] env[62627]: INFO nova.compute.claims [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: b1ba58c1-340f-486a-8f47-86fbc7b27485] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 800.494064] env[62627]: INFO nova.scheduler.client.report [None req-e33b7f68-a065-4361-afff-ba33ca167387 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Deleted allocations for instance f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7 [ 800.691990] env[62627]: DEBUG oslo_vmware.api [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Task: {'id': task-2194152, 'name': PowerOnVM_Task, 'duration_secs': 0.547322} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.696406] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 800.696677] env[62627]: INFO nova.compute.manager [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Took 11.35 seconds to spawn the instance on the hypervisor. [ 800.696900] env[62627]: DEBUG nova.compute.manager [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 800.697734] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1048e28d-bfd5-4bb1-afc1-4923cb4621d2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.727854] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e36f63a7-11e0-4439-b16e-1d702ec48e64 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] Creating linked-clone VM from snapshot {{(pid=62627) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 800.731020] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-a5fe689c-bcaa-4786-8127-98c92b4ad2f2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.741341] env[62627]: DEBUG oslo_vmware.api [None req-e36f63a7-11e0-4439-b16e-1d702ec48e64 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the task: (returnval){ [ 800.741341] env[62627]: value = "task-2194153" [ 800.741341] env[62627]: _type = "Task" [ 800.741341] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.752815] env[62627]: DEBUG oslo_vmware.api [None req-e36f63a7-11e0-4439-b16e-1d702ec48e64 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194153, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.903705] env[62627]: INFO nova.compute.manager [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] [instance: 16c4065b-83cf-4b1c-92bf-66ce0a75e573] Rebuilding instance [ 800.927580] env[62627]: DEBUG nova.network.neutron [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Successfully updated port: 21d30d49-10fb-462d-9b5f-7525c3497178 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 800.988985] env[62627]: DEBUG nova.compute.manager [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] [instance: 16c4065b-83cf-4b1c-92bf-66ce0a75e573] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 800.989506] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a624c3bf-8538-44ba-aed7-c373462e6790 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.015200] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e33b7f68-a065-4361-afff-ba33ca167387 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Lock "f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.031s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 801.092448] env[62627]: DEBUG nova.network.neutron [req-6d81a2b5-c20a-4abc-b4d8-9697fcffecfe req-1fca5923-a45b-4298-8c38-ba7b83474bc5 service nova] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Updated VIF entry in instance network info cache for port 246fd9bd-81de-4162-b744-58c17169a80f. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 801.092842] env[62627]: DEBUG nova.network.neutron [req-6d81a2b5-c20a-4abc-b4d8-9697fcffecfe req-1fca5923-a45b-4298-8c38-ba7b83474bc5 service nova] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Updating instance_info_cache with network_info: [{"id": "246fd9bd-81de-4162-b744-58c17169a80f", "address": "fa:16:3e:9e:4a:5e", "network": {"id": "5d65934a-2966-4094-bc55-817726caaea5", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1246247596-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b12d6c9026644c9cb45d687b92161121", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap246fd9bd-81", "ovs_interfaceid": "246fd9bd-81de-4162-b744-58c17169a80f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 801.224330] env[62627]: INFO nova.compute.manager [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Took 36.81 seconds to build instance. [ 801.254696] env[62627]: DEBUG oslo_vmware.api [None req-e36f63a7-11e0-4439-b16e-1d702ec48e64 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194153, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.431851] env[62627]: DEBUG oslo_concurrency.lockutils [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquiring lock "refresh_cache-a12855e2-1544-4e85-a7ff-c026fec861b6" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 801.431851] env[62627]: DEBUG oslo_concurrency.lockutils [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquired lock "refresh_cache-a12855e2-1544-4e85-a7ff-c026fec861b6" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 801.431851] env[62627]: DEBUG nova.network.neutron [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 801.595911] env[62627]: DEBUG oslo_concurrency.lockutils [req-6d81a2b5-c20a-4abc-b4d8-9697fcffecfe req-1fca5923-a45b-4298-8c38-ba7b83474bc5 service nova] Releasing lock "refresh_cache-a674db15-ea4e-44c8-abc1-6af425286517" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 801.725545] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8e5b14b3-5679-493d-a7e7-8e40a8c452ea tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Lock "faa16f10-63f0-457c-8cd7-20a624454d86" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 60.314s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 801.755155] env[62627]: DEBUG oslo_vmware.api [None req-e36f63a7-11e0-4439-b16e-1d702ec48e64 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194153, 'name': CloneVM_Task} progress is 94%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.975297] env[62627]: DEBUG nova.network.neutron [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Instance cache missing network info. 
{{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 802.023471] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] [instance: 16c4065b-83cf-4b1c-92bf-66ce0a75e573] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 802.023791] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-29ac2565-da2e-4e3f-8afd-da5195a5b618 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.032643] env[62627]: DEBUG oslo_vmware.api [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Waiting for the task: (returnval){ [ 802.032643] env[62627]: value = "task-2194154" [ 802.032643] env[62627]: _type = "Task" [ 802.032643] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.044500] env[62627]: DEBUG oslo_vmware.api [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Task: {'id': task-2194154, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.046285] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8033f5e-3d58-4b2c-bc9a-d1be0ef6a2f0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.053460] env[62627]: DEBUG nova.compute.manager [req-a921ef6e-dc35-401a-bd98-a4ddc490a90b req-bc63fcec-f65f-4a57-a80e-c97d8ce79b0f service nova] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Received event network-changed-4cb3b0a3-c2f5-4b36-a75a-b6d3b5e59dc3 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 802.053692] env[62627]: DEBUG nova.compute.manager [req-a921ef6e-dc35-401a-bd98-a4ddc490a90b req-bc63fcec-f65f-4a57-a80e-c97d8ce79b0f service nova] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Refreshing instance network info cache due to event network-changed-4cb3b0a3-c2f5-4b36-a75a-b6d3b5e59dc3. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 802.053914] env[62627]: DEBUG oslo_concurrency.lockutils [req-a921ef6e-dc35-401a-bd98-a4ddc490a90b req-bc63fcec-f65f-4a57-a80e-c97d8ce79b0f service nova] Acquiring lock "refresh_cache-fffa23fe-3ce2-4879-b72d-11275d298504" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 802.054079] env[62627]: DEBUG oslo_concurrency.lockutils [req-a921ef6e-dc35-401a-bd98-a4ddc490a90b req-bc63fcec-f65f-4a57-a80e-c97d8ce79b0f service nova] Acquired lock "refresh_cache-fffa23fe-3ce2-4879-b72d-11275d298504" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 802.054247] env[62627]: DEBUG nova.network.neutron [req-a921ef6e-dc35-401a-bd98-a4ddc490a90b req-bc63fcec-f65f-4a57-a80e-c97d8ce79b0f service nova] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Refreshing network info cache for port 4cb3b0a3-c2f5-4b36-a75a-b6d3b5e59dc3 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 802.062784] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-108c67a1-1f28-4861-a3db-65541a691cc8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.103869] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de2e87d5-0d5f-47ca-8546-72e050d7642d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.113530] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ce471c5-baae-41d4-86c8-31e9585adf6e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.129794] env[62627]: DEBUG nova.compute.provider_tree [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 802.228098] env[62627]: DEBUG nova.compute.manager [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: aed62160-2db5-4008-a491-2b27ae81ef5f] Starting instance... 
{{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 802.237255] env[62627]: DEBUG nova.network.neutron [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Updating instance_info_cache with network_info: [{"id": "21d30d49-10fb-462d-9b5f-7525c3497178", "address": "fa:16:3e:9b:65:45", "network": {"id": "a1c5c227-8f1f-4881-a6d8-f446b64d9846", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1052922661-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e4f7ad3ebcf4c4d97bdbce58eec5a09", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "023d6500-887e-4dc4-bec5-06b40450d9c0", "external-id": "nsx-vlan-transportzone-108", "segmentation_id": 108, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap21d30d49-10", "ovs_interfaceid": "21d30d49-10fb-462d-9b5f-7525c3497178", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 802.255101] env[62627]: DEBUG oslo_vmware.api [None req-e36f63a7-11e0-4439-b16e-1d702ec48e64 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194153, 'name': CloneVM_Task} progress is 95%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.544468] env[62627]: DEBUG oslo_vmware.api [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Task: {'id': task-2194154, 'name': PowerOffVM_Task, 'duration_secs': 0.314012} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.544790] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] [instance: 16c4065b-83cf-4b1c-92bf-66ce0a75e573] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 802.545067] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] [instance: 16c4065b-83cf-4b1c-92bf-66ce0a75e573] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 802.545875] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0986f7e-c72c-4d3d-8309-d70eef4313aa {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.553095] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] [instance: 16c4065b-83cf-4b1c-92bf-66ce0a75e573] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 802.553327] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-32de3ff4-af6b-4aa8-a44e-e2af301d4994 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.587061] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] [instance: 16c4065b-83cf-4b1c-92bf-66ce0a75e573] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 802.587152] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] [instance: 16c4065b-83cf-4b1c-92bf-66ce0a75e573] Deleting contents of the VM from datastore datastore1 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 802.587780] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Deleting the datastore file [datastore1] 16c4065b-83cf-4b1c-92bf-66ce0a75e573 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 802.587780] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5710d0d5-730d-4b14-82e7-80727e99580a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.596199] env[62627]: DEBUG oslo_vmware.api [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Waiting for the task: (returnval){ [ 802.596199] env[62627]: value = "task-2194156" [ 802.596199] env[62627]: _type = "Task" [ 802.596199] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.605695] env[62627]: DEBUG oslo_vmware.api [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Task: {'id': task-2194156, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.633604] env[62627]: DEBUG nova.compute.manager [req-436a2b1d-43ab-4bf7-b86c-7079bf626944 req-363f2fe1-772c-4d4e-821e-160fe50a9512 service nova] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Received event network-changed-bbeb1b12-c6f2-470f-a952-37da5b4db0c9 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 802.633604] env[62627]: DEBUG nova.compute.manager [req-436a2b1d-43ab-4bf7-b86c-7079bf626944 req-363f2fe1-772c-4d4e-821e-160fe50a9512 service nova] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Refreshing instance network info cache due to event network-changed-bbeb1b12-c6f2-470f-a952-37da5b4db0c9. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 802.633604] env[62627]: DEBUG oslo_concurrency.lockutils [req-436a2b1d-43ab-4bf7-b86c-7079bf626944 req-363f2fe1-772c-4d4e-821e-160fe50a9512 service nova] Acquiring lock "refresh_cache-2cfcbc3b-c2b9-4155-a48a-0e73eea0589e" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 802.633604] env[62627]: DEBUG oslo_concurrency.lockutils [req-436a2b1d-43ab-4bf7-b86c-7079bf626944 req-363f2fe1-772c-4d4e-821e-160fe50a9512 service nova] Acquired lock "refresh_cache-2cfcbc3b-c2b9-4155-a48a-0e73eea0589e" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 802.633740] env[62627]: DEBUG nova.network.neutron [req-436a2b1d-43ab-4bf7-b86c-7079bf626944 req-363f2fe1-772c-4d4e-821e-160fe50a9512 service nova] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Refreshing network info cache for port bbeb1b12-c6f2-470f-a952-37da5b4db0c9 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 802.636303] env[62627]: DEBUG nova.scheduler.client.report [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 802.739943] env[62627]: DEBUG oslo_concurrency.lockutils [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Releasing lock "refresh_cache-a12855e2-1544-4e85-a7ff-c026fec861b6" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 802.740598] env[62627]: DEBUG nova.compute.manager [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 
tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Instance network_info: |[{"id": "21d30d49-10fb-462d-9b5f-7525c3497178", "address": "fa:16:3e:9b:65:45", "network": {"id": "a1c5c227-8f1f-4881-a6d8-f446b64d9846", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1052922661-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e4f7ad3ebcf4c4d97bdbce58eec5a09", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "023d6500-887e-4dc4-bec5-06b40450d9c0", "external-id": "nsx-vlan-transportzone-108", "segmentation_id": 108, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap21d30d49-10", "ovs_interfaceid": "21d30d49-10fb-462d-9b5f-7525c3497178", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 802.741337] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9b:65:45', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '023d6500-887e-4dc4-bec5-06b40450d9c0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '21d30d49-10fb-462d-9b5f-7525c3497178', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 802.750064] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Creating folder: Project (8e4f7ad3ebcf4c4d97bdbce58eec5a09). Parent ref: group-v447541. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 802.750393] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8e7428c2-0476-4ad6-8b91-73a21cc24a6f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.753795] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 802.764944] env[62627]: DEBUG oslo_vmware.api [None req-e36f63a7-11e0-4439-b16e-1d702ec48e64 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194153, 'name': CloneVM_Task, 'duration_secs': 1.735803} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.765239] env[62627]: INFO nova.virt.vmwareapi.vmops [None req-e36f63a7-11e0-4439-b16e-1d702ec48e64 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] Created linked-clone VM from snapshot [ 802.765991] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf9e0736-de9d-4ee1-a671-5a25b97db7fb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.775531] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d866bf92-8bfa-4630-9c60-2c8068361dd4 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Acquiring lock "2cfcbc3b-c2b9-4155-a48a-0e73eea0589e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 802.775829] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d866bf92-8bfa-4630-9c60-2c8068361dd4 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Lock "2cfcbc3b-c2b9-4155-a48a-0e73eea0589e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 802.777017] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d866bf92-8bfa-4630-9c60-2c8068361dd4 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Acquiring lock "2cfcbc3b-c2b9-4155-a48a-0e73eea0589e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 802.777017] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d866bf92-8bfa-4630-9c60-2c8068361dd4 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Lock "2cfcbc3b-c2b9-4155-a48a-0e73eea0589e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 802.777017] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d866bf92-8bfa-4630-9c60-2c8068361dd4 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Lock "2cfcbc3b-c2b9-4155-a48a-0e73eea0589e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 802.778606] env[62627]: DEBUG nova.virt.vmwareapi.images [None req-e36f63a7-11e0-4439-b16e-1d702ec48e64 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] Uploading image e9a1cf7c-3c1c-4510-847f-fc1bacd2e5ca {{(pid=62627) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 802.781716] env[62627]: INFO nova.compute.manager [None req-d866bf92-8bfa-4630-9c60-2c8068361dd4 
tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Terminating instance [ 802.789247] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Created folder: Project (8e4f7ad3ebcf4c4d97bdbce58eec5a09) in parent group-v447541. [ 802.789247] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Creating folder: Instances. Parent ref: group-v447612. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 802.789973] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9264d36a-fd11-4577-90bc-386df7f3cddb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.806291] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Created folder: Instances in parent group-v447612. [ 802.806291] env[62627]: DEBUG oslo.service.loopingcall [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 802.806466] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 802.806757] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0f4a7862-e8b4-459e-8c35-9e904512f0cb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.825762] env[62627]: DEBUG oslo_vmware.rw_handles [None req-e36f63a7-11e0-4439-b16e-1d702ec48e64 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 802.825762] env[62627]: value = "vm-447611" [ 802.825762] env[62627]: _type = "VirtualMachine" [ 802.825762] env[62627]: }. {{(pid=62627) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 802.826577] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-5c5be184-5b5f-42b5-9b9b-e1f2d62ffd5a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.836482] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 802.836482] env[62627]: value = "task-2194159" [ 802.836482] env[62627]: _type = "Task" [ 802.836482] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.839087] env[62627]: DEBUG oslo_vmware.rw_handles [None req-e36f63a7-11e0-4439-b16e-1d702ec48e64 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Lease: (returnval){ [ 802.839087] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52498358-a07b-7c95-a23f-4f0ee3cab797" [ 802.839087] env[62627]: _type = "HttpNfcLease" [ 802.839087] env[62627]: } obtained for exporting VM: (result){ [ 802.839087] env[62627]: value = "vm-447611" [ 802.839087] env[62627]: _type = "VirtualMachine" [ 802.839087] env[62627]: }. {{(pid=62627) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 802.839639] env[62627]: DEBUG oslo_vmware.api [None req-e36f63a7-11e0-4439-b16e-1d702ec48e64 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the lease: (returnval){ [ 802.839639] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52498358-a07b-7c95-a23f-4f0ee3cab797" [ 802.839639] env[62627]: _type = "HttpNfcLease" [ 802.839639] env[62627]: } to be ready. {{(pid=62627) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 802.848158] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2e38d84a-9513-4503-805a-6b92d68b7e43 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Acquiring lock "77418dff-6bdc-4e0a-954a-45c80960dbf1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 802.848158] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2e38d84a-9513-4503-805a-6b92d68b7e43 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Lock "77418dff-6bdc-4e0a-954a-45c80960dbf1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 802.848158] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2e38d84a-9513-4503-805a-6b92d68b7e43 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Acquiring lock "77418dff-6bdc-4e0a-954a-45c80960dbf1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 802.848470] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2e38d84a-9513-4503-805a-6b92d68b7e43 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Lock "77418dff-6bdc-4e0a-954a-45c80960dbf1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 802.848559] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2e38d84a-9513-4503-805a-6b92d68b7e43 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Lock "77418dff-6bdc-4e0a-954a-45c80960dbf1-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 802.855112] env[62627]: INFO nova.compute.manager [None req-2e38d84a-9513-4503-805a-6b92d68b7e43 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: 77418dff-6bdc-4e0a-954a-45c80960dbf1] Terminating instance [ 802.856407] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194159, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.856598] env[62627]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 802.856598] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52498358-a07b-7c95-a23f-4f0ee3cab797" [ 802.856598] env[62627]: _type = "HttpNfcLease" [ 802.856598] env[62627]: } is ready. {{(pid=62627) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 802.856871] env[62627]: DEBUG oslo_vmware.rw_handles [None req-e36f63a7-11e0-4439-b16e-1d702ec48e64 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 802.856871] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52498358-a07b-7c95-a23f-4f0ee3cab797" [ 802.856871] env[62627]: _type = "HttpNfcLease" [ 802.856871] env[62627]: }. {{(pid=62627) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 802.857665] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bc57c9b-d583-4c39-89f9-9af61ba31297 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.869127] env[62627]: DEBUG oslo_vmware.rw_handles [None req-e36f63a7-11e0-4439-b16e-1d702ec48e64 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52109c69-5219-38ef-2d79-fbf9a25d21fe/disk-0.vmdk from lease info. {{(pid=62627) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 802.869328] env[62627]: DEBUG oslo_vmware.rw_handles [None req-e36f63a7-11e0-4439-b16e-1d702ec48e64 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52109c69-5219-38ef-2d79-fbf9a25d21fe/disk-0.vmdk for reading. {{(pid=62627) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 802.966203] env[62627]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-319c8922-cf6f-42a7-9b7c-48b3943b41a9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.082549] env[62627]: DEBUG nova.network.neutron [req-a921ef6e-dc35-401a-bd98-a4ddc490a90b req-bc63fcec-f65f-4a57-a80e-c97d8ce79b0f service nova] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Updated VIF entry in instance network info cache for port 4cb3b0a3-c2f5-4b36-a75a-b6d3b5e59dc3. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 803.083199] env[62627]: DEBUG nova.network.neutron [req-a921ef6e-dc35-401a-bd98-a4ddc490a90b req-bc63fcec-f65f-4a57-a80e-c97d8ce79b0f service nova] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Updating instance_info_cache with network_info: [{"id": "4cb3b0a3-c2f5-4b36-a75a-b6d3b5e59dc3", "address": "fa:16:3e:e4:4d:b6", "network": {"id": "13960831-02bf-4e73-b792-efe56ca391d7", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-864734567-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "4edac85f0a824e4798b50cf27575dfdb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee20e439-fed9-490e-97dd-f3c886977ae1", "external-id": "nsx-vlan-transportzone-357", "segmentation_id": 357, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4cb3b0a3-c2", "ovs_interfaceid": "4cb3b0a3-c2f5-4b36-a75a-b6d3b5e59dc3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 803.109318] env[62627]: DEBUG oslo_vmware.api [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Task: {'id': task-2194156, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.186258} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.110116] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 803.110494] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] [instance: 16c4065b-83cf-4b1c-92bf-66ce0a75e573] Deleted contents of the VM from datastore datastore1 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 803.110819] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] [instance: 16c4065b-83cf-4b1c-92bf-66ce0a75e573] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 803.348463] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.672s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 803.348463] env[62627]: DEBUG nova.compute.manager [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: b1ba58c1-340f-486a-8f47-86fbc7b27485] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 803.348463] env[62627]: DEBUG oslo_concurrency.lockutils [None req-fd2d46c3-107d-480c-9d9b-a5a305c61bfc tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 22.881s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 803.348463] env[62627]: DEBUG nova.compute.manager [None req-d866bf92-8bfa-4630-9c60-2c8068361dd4 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Start destroying the instance on the hypervisor. 
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 803.349659] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d866bf92-8bfa-4630-9c60-2c8068361dd4 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 803.349659] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe4b2894-dd74-4bba-9197-47fefab843be {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.349659] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-d866bf92-8bfa-4630-9c60-2c8068361dd4 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 803.349659] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8401fc2e-2365-4778-b10c-3fee510ef5a2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.349659] env[62627]: DEBUG oslo_vmware.api [None req-d866bf92-8bfa-4630-9c60-2c8068361dd4 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Waiting for the task: (returnval){ [ 803.349659] env[62627]: value = "task-2194161" [ 803.349659] env[62627]: _type = "Task" [ 803.349659] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.353067] env[62627]: DEBUG oslo_vmware.api [None req-d866bf92-8bfa-4630-9c60-2c8068361dd4 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Task: {'id': task-2194161, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.355493] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194159, 'name': CreateVM_Task, 'duration_secs': 0.412485} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.360262] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 803.361206] env[62627]: DEBUG oslo_concurrency.lockutils [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 803.361443] env[62627]: DEBUG oslo_concurrency.lockutils [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 803.361772] env[62627]: DEBUG oslo_concurrency.lockutils [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 803.362900] env[62627]: DEBUG nova.compute.manager [None req-2e38d84a-9513-4503-805a-6b92d68b7e43 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: 77418dff-6bdc-4e0a-954a-45c80960dbf1] Start destroying the instance on the hypervisor. {{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 803.363148] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-2e38d84a-9513-4503-805a-6b92d68b7e43 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: 77418dff-6bdc-4e0a-954a-45c80960dbf1] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 803.363418] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-66aeac40-2cb6-4353-9e30-909f25699df7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.365833] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3422438-49fe-4948-91f1-9464724c5c36 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.375364] env[62627]: DEBUG oslo_vmware.api [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Waiting for the task: (returnval){ [ 803.375364] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52278954-9f80-ccdb-ba12-a4e52f3acc33" [ 803.375364] env[62627]: _type = "Task" [ 803.375364] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.383021] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e38d84a-9513-4503-805a-6b92d68b7e43 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: 77418dff-6bdc-4e0a-954a-45c80960dbf1] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 803.388375] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-708ee99a-ecac-488f-a4a4-757c6cf315e6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.397938] env[62627]: DEBUG oslo_vmware.api [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52278954-9f80-ccdb-ba12-a4e52f3acc33, 'name': SearchDatastore_Task, 'duration_secs': 0.013152} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.399816] env[62627]: DEBUG oslo_concurrency.lockutils [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 803.400240] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 803.400668] env[62627]: DEBUG oslo_concurrency.lockutils [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 803.400873] env[62627]: DEBUG oslo_concurrency.lockutils [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 803.401193] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 803.401670] env[62627]: DEBUG oslo_vmware.api [None req-2e38d84a-9513-4503-805a-6b92d68b7e43 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Waiting for the task: (returnval){ [ 803.401670] env[62627]: value = "task-2194162" [ 
803.401670] env[62627]: _type = "Task" [ 803.401670] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.401925] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8fdd38f9-f461-459f-9f55-c1d5351d16d6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.418459] env[62627]: DEBUG oslo_vmware.api [None req-2e38d84a-9513-4503-805a-6b92d68b7e43 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Task: {'id': task-2194162, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.423713] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 803.424044] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 803.425590] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b96d0882-16cb-48f6-baf4-a43553b9184c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.434526] env[62627]: DEBUG oslo_vmware.api [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Waiting for the task: (returnval){ [ 803.434526] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52cd829b-03d1-8b84-c85e-1ae42b7f8c7e" [ 803.434526] env[62627]: _type = "Task" [ 803.434526] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.445889] env[62627]: DEBUG oslo_vmware.api [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52cd829b-03d1-8b84-c85e-1ae42b7f8c7e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.586227] env[62627]: DEBUG oslo_concurrency.lockutils [req-a921ef6e-dc35-401a-bd98-a4ddc490a90b req-bc63fcec-f65f-4a57-a80e-c97d8ce79b0f service nova] Releasing lock "refresh_cache-fffa23fe-3ce2-4879-b72d-11275d298504" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 803.692826] env[62627]: DEBUG nova.network.neutron [req-436a2b1d-43ab-4bf7-b86c-7079bf626944 req-363f2fe1-772c-4d4e-821e-160fe50a9512 service nova] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Updated VIF entry in instance network info cache for port bbeb1b12-c6f2-470f-a952-37da5b4db0c9. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 803.692945] env[62627]: DEBUG nova.network.neutron [req-436a2b1d-43ab-4bf7-b86c-7079bf626944 req-363f2fe1-772c-4d4e-821e-160fe50a9512 service nova] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Updating instance_info_cache with network_info: [{"id": "bbeb1b12-c6f2-470f-a952-37da5b4db0c9", "address": "fa:16:3e:d9:ed:be", "network": {"id": "5d65934a-2966-4094-bc55-817726caaea5", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1246247596-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b12d6c9026644c9cb45d687b92161121", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbbeb1b12-c6", "ovs_interfaceid": "bbeb1b12-c6f2-470f-a952-37da5b4db0c9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 803.799278] env[62627]: DEBUG nova.compute.utils [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 803.806494] env[62627]: DEBUG nova.compute.manager [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: b1ba58c1-340f-486a-8f47-86fbc7b27485] Allocating IP information in the background. {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 803.806494] env[62627]: DEBUG nova.network.neutron [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: b1ba58c1-340f-486a-8f47-86fbc7b27485] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 803.838285] env[62627]: DEBUG oslo_vmware.api [None req-d866bf92-8bfa-4630-9c60-2c8068361dd4 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Task: {'id': task-2194161, 'name': PowerOffVM_Task, 'duration_secs': 0.293801} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.838696] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-d866bf92-8bfa-4630-9c60-2c8068361dd4 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 803.839319] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d866bf92-8bfa-4630-9c60-2c8068361dd4 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 803.842660] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-22070471-cadc-476c-a3ff-c95b75ed62f7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.886018] env[62627]: DEBUG nova.policy [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c72c1ac25b9f4aeea2c5fb37afd37598', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '14238d8125674b63a7cf4452cb260283', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 803.918920] env[62627]: DEBUG oslo_vmware.api [None req-2e38d84a-9513-4503-805a-6b92d68b7e43 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Task: {'id': task-2194162, 'name': PowerOffVM_Task, 'duration_secs': 0.242746} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.919814] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e38d84a-9513-4503-805a-6b92d68b7e43 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: 77418dff-6bdc-4e0a-954a-45c80960dbf1] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 803.920141] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-2e38d84a-9513-4503-805a-6b92d68b7e43 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: 77418dff-6bdc-4e0a-954a-45c80960dbf1] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 803.920954] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-064a8032-5d1e-43d9-a659-6375e4091a9e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.927084] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d866bf92-8bfa-4630-9c60-2c8068361dd4 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 803.928022] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d866bf92-8bfa-4630-9c60-2c8068361dd4 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 803.928022] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-d866bf92-8bfa-4630-9c60-2c8068361dd4 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Deleting the datastore file [datastore2] 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 803.928884] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8d04c7f3-fd6d-4a2a-8ecb-6af0e160a9bd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.941099] env[62627]: DEBUG oslo_vmware.api [None req-d866bf92-8bfa-4630-9c60-2c8068361dd4 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Waiting for the task: (returnval){ [ 803.941099] env[62627]: value = "task-2194165" [ 803.941099] env[62627]: _type = "Task" [ 803.941099] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.952466] env[62627]: DEBUG oslo_vmware.api [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52cd829b-03d1-8b84-c85e-1ae42b7f8c7e, 'name': SearchDatastore_Task, 'duration_secs': 0.014073} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.953831] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a81aef7d-a066-4766-b61d-b99464e81b4c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.959831] env[62627]: DEBUG oslo_vmware.api [None req-d866bf92-8bfa-4630-9c60-2c8068361dd4 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Task: {'id': task-2194165, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.967626] env[62627]: DEBUG oslo_vmware.api [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Waiting for the task: (returnval){ [ 803.967626] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52082cac-6972-027c-4e70-0daa643543e8" [ 803.967626] env[62627]: _type = "Task" [ 803.967626] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.979498] env[62627]: DEBUG oslo_vmware.api [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52082cac-6972-027c-4e70-0daa643543e8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.004019] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-2e38d84a-9513-4503-805a-6b92d68b7e43 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: 77418dff-6bdc-4e0a-954a-45c80960dbf1] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 804.004019] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-2e38d84a-9513-4503-805a-6b92d68b7e43 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: 77418dff-6bdc-4e0a-954a-45c80960dbf1] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 804.004019] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e38d84a-9513-4503-805a-6b92d68b7e43 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Deleting the datastore file [datastore2] 77418dff-6bdc-4e0a-954a-45c80960dbf1 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 804.004019] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7e0f28da-316e-40b9-9124-5f7f8e1237ff {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.012636] env[62627]: DEBUG oslo_vmware.api [None req-2e38d84a-9513-4503-805a-6b92d68b7e43 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Waiting for the task: (returnval){ [ 804.012636] env[62627]: value = "task-2194166" [ 804.012636] env[62627]: _type = "Task" [ 804.012636] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.027468] env[62627]: DEBUG oslo_vmware.api [None req-2e38d84a-9513-4503-805a-6b92d68b7e43 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Task: {'id': task-2194166, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.159203] env[62627]: DEBUG nova.virt.hardware [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 804.159313] env[62627]: DEBUG nova.virt.hardware [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 804.159383] env[62627]: DEBUG nova.virt.hardware [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 804.159561] env[62627]: DEBUG nova.virt.hardware [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 804.159697] env[62627]: DEBUG nova.virt.hardware [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 804.159845] env[62627]: DEBUG nova.virt.hardware [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 804.160180] env[62627]: DEBUG nova.virt.hardware [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 804.160359] env[62627]: DEBUG 
nova.virt.hardware [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 804.160564] env[62627]: DEBUG nova.virt.hardware [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 804.160971] env[62627]: DEBUG nova.virt.hardware [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 804.161128] env[62627]: DEBUG nova.virt.hardware [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 804.162108] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7cf92ff-78d1-4fab-b901-228c80ef7a32 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.175483] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86c2e929-3c0b-4c3a-a167-14ce01c00564 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.190751] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] [instance: 16c4065b-83cf-4b1c-92bf-66ce0a75e573] Instance VIF info [] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 804.196583] env[62627]: DEBUG oslo.service.loopingcall [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 804.196919] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 16c4065b-83cf-4b1c-92bf-66ce0a75e573] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 804.197187] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-073f6c0f-bd8a-4062-be6e-84f5e0b9fb92 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.214717] env[62627]: DEBUG oslo_concurrency.lockutils [req-436a2b1d-43ab-4bf7-b86c-7079bf626944 req-363f2fe1-772c-4d4e-821e-160fe50a9512 service nova] Releasing lock "refresh_cache-2cfcbc3b-c2b9-4155-a48a-0e73eea0589e" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 804.214963] env[62627]: DEBUG nova.compute.manager [req-436a2b1d-43ab-4bf7-b86c-7079bf626944 req-363f2fe1-772c-4d4e-821e-160fe50a9512 service nova] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Received event network-vif-plugged-21d30d49-10fb-462d-9b5f-7525c3497178 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 804.215175] env[62627]: DEBUG oslo_concurrency.lockutils [req-436a2b1d-43ab-4bf7-b86c-7079bf626944 req-363f2fe1-772c-4d4e-821e-160fe50a9512 service nova] Acquiring lock "a12855e2-1544-4e85-a7ff-c026fec861b6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 804.216519] env[62627]: DEBUG oslo_concurrency.lockutils [req-436a2b1d-43ab-4bf7-b86c-7079bf626944 req-363f2fe1-772c-4d4e-821e-160fe50a9512 service nova] Lock "a12855e2-1544-4e85-a7ff-c026fec861b6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 804.216519] env[62627]: DEBUG oslo_concurrency.lockutils [req-436a2b1d-43ab-4bf7-b86c-7079bf626944 req-363f2fe1-772c-4d4e-821e-160fe50a9512 service nova] Lock "a12855e2-1544-4e85-a7ff-c026fec861b6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 804.216519] env[62627]: DEBUG nova.compute.manager [req-436a2b1d-43ab-4bf7-b86c-7079bf626944 req-363f2fe1-772c-4d4e-821e-160fe50a9512 service nova] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] No waiting events found dispatching network-vif-plugged-21d30d49-10fb-462d-9b5f-7525c3497178 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 804.216519] env[62627]: WARNING nova.compute.manager [req-436a2b1d-43ab-4bf7-b86c-7079bf626944 req-363f2fe1-772c-4d4e-821e-160fe50a9512 service nova] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Received unexpected event network-vif-plugged-21d30d49-10fb-462d-9b5f-7525c3497178 for instance with vm_state building and task_state spawning. 
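The records above repeat one pattern many times: a vCenter operation (PowerOffVM_Task, CreateVM_Task, DeleteDatastoreFile_Task, SearchDatastore_Task) returns a task reference, and the caller logs "Waiting for the task", polls progress, and finally logs "completed successfully". The sketch below is only a minimal approximation of that polling loop, not the oslo.vmware or Nova implementation; TaskInfo, fetch_task_info and the 0.5 s interval are illustrative assumptions.

```python
# Minimal sketch (assumptions, not oslo.vmware code) of the task-polling
# pattern visible in the log: submit a *_Task, then poll until it reports
# success or error, mirroring "progress is N%" / "completed successfully".
import time
from dataclasses import dataclass


@dataclass
class TaskInfo:
    state: str          # "queued", "running", "success" or "error"
    progress: int = 0   # percentage, as in "progress is 0%" above
    error: str = ""


def fetch_task_info(task_ref: str) -> TaskInfo:
    """Hypothetical stand-in for reading TaskInfo from vCenter.

    The real driver does a SOAP property read; here the task finishes
    immediately so the sketch stays runnable without a vCenter.
    """
    return TaskInfo(state="success", progress=100)


def wait_for_task(task_ref: str, poll_interval: float = 0.5) -> TaskInfo:
    """Poll a vCenter-style task reference until it completes."""
    while True:
        info = fetch_task_info(task_ref)
        if info.state == "success":
            return info
        if info.state == "error":
            raise RuntimeError(f"Task {task_ref} failed: {info.error}")
        time.sleep(poll_interval)


print(wait_for_task("task-2194167").state)  # -> success
```

In the log this loop is what produces the paired "Waiting for the task: (returnval){ value = "task-..." }" and "Task: {'id': ..., 'duration_secs': ...} completed successfully" lines for each operation.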
[ 804.216519] env[62627]: DEBUG nova.compute.manager [req-436a2b1d-43ab-4bf7-b86c-7079bf626944 req-363f2fe1-772c-4d4e-821e-160fe50a9512 service nova] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Received event network-changed-21d30d49-10fb-462d-9b5f-7525c3497178 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 804.216989] env[62627]: DEBUG nova.compute.manager [req-436a2b1d-43ab-4bf7-b86c-7079bf626944 req-363f2fe1-772c-4d4e-821e-160fe50a9512 service nova] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Refreshing instance network info cache due to event network-changed-21d30d49-10fb-462d-9b5f-7525c3497178. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 804.216989] env[62627]: DEBUG oslo_concurrency.lockutils [req-436a2b1d-43ab-4bf7-b86c-7079bf626944 req-363f2fe1-772c-4d4e-821e-160fe50a9512 service nova] Acquiring lock "refresh_cache-a12855e2-1544-4e85-a7ff-c026fec861b6" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 804.216989] env[62627]: DEBUG oslo_concurrency.lockutils [req-436a2b1d-43ab-4bf7-b86c-7079bf626944 req-363f2fe1-772c-4d4e-821e-160fe50a9512 service nova] Acquired lock "refresh_cache-a12855e2-1544-4e85-a7ff-c026fec861b6" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 804.216989] env[62627]: DEBUG nova.network.neutron [req-436a2b1d-43ab-4bf7-b86c-7079bf626944 req-363f2fe1-772c-4d4e-821e-160fe50a9512 service nova] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Refreshing network info cache for port 21d30d49-10fb-462d-9b5f-7525c3497178 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 804.226412] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 804.226412] env[62627]: value = "task-2194167" [ 804.226412] env[62627]: _type = "Task" [ 804.226412] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.236679] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194167, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.309459] env[62627]: DEBUG nova.compute.manager [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: b1ba58c1-340f-486a-8f47-86fbc7b27485] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 804.419453] env[62627]: DEBUG nova.network.neutron [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: b1ba58c1-340f-486a-8f47-86fbc7b27485] Successfully created port: 85865adb-92d3-4f6e-a7af-6ddd191d13d3 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 804.464448] env[62627]: DEBUG oslo_vmware.api [None req-d866bf92-8bfa-4630-9c60-2c8068361dd4 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Task: {'id': task-2194165, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.314076} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.464610] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-d866bf92-8bfa-4630-9c60-2c8068361dd4 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 804.470038] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d866bf92-8bfa-4630-9c60-2c8068361dd4 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 804.470038] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d866bf92-8bfa-4630-9c60-2c8068361dd4 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 804.470038] env[62627]: INFO nova.compute.manager [None req-d866bf92-8bfa-4630-9c60-2c8068361dd4 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Took 1.18 seconds to destroy the instance on the hypervisor. [ 804.470038] env[62627]: DEBUG oslo.service.loopingcall [None req-d866bf92-8bfa-4630-9c60-2c8068361dd4 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 804.470038] env[62627]: DEBUG nova.compute.manager [-] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 804.470370] env[62627]: DEBUG nova.network.neutron [-] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 804.488885] env[62627]: DEBUG oslo_vmware.api [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52082cac-6972-027c-4e70-0daa643543e8, 'name': SearchDatastore_Task, 'duration_secs': 0.024706} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.489152] env[62627]: DEBUG oslo_concurrency.lockutils [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 804.489430] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore1] a12855e2-1544-4e85-a7ff-c026fec861b6/a12855e2-1544-4e85-a7ff-c026fec861b6.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 804.489684] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-323e0b32-807e-44ae-bae3-0dfa47213aa8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.500193] env[62627]: DEBUG oslo_vmware.api [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Waiting for the task: (returnval){ [ 804.500193] env[62627]: value = "task-2194168" [ 804.500193] env[62627]: _type = "Task" [ 804.500193] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.517937] env[62627]: DEBUG oslo_vmware.api [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2194168, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.535766] env[62627]: DEBUG oslo_vmware.api [None req-2e38d84a-9513-4503-805a-6b92d68b7e43 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Task: {'id': task-2194166, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.318947} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.540577] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e38d84a-9513-4503-805a-6b92d68b7e43 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 804.540814] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-2e38d84a-9513-4503-805a-6b92d68b7e43 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: 77418dff-6bdc-4e0a-954a-45c80960dbf1] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 804.541206] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-2e38d84a-9513-4503-805a-6b92d68b7e43 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: 77418dff-6bdc-4e0a-954a-45c80960dbf1] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 804.541476] env[62627]: INFO nova.compute.manager [None req-2e38d84a-9513-4503-805a-6b92d68b7e43 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] [instance: 77418dff-6bdc-4e0a-954a-45c80960dbf1] Took 1.18 seconds to destroy the instance on the hypervisor. [ 804.541883] env[62627]: DEBUG oslo.service.loopingcall [None req-2e38d84a-9513-4503-805a-6b92d68b7e43 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 804.542571] env[62627]: DEBUG nova.compute.manager [-] [instance: 77418dff-6bdc-4e0a-954a-45c80960dbf1] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 804.542728] env[62627]: DEBUG nova.network.neutron [-] [instance: 77418dff-6bdc-4e0a-954a-45c80960dbf1] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 804.575748] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07813bdf-f863-49c0-8da0-ff6db2895503 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.586681] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c194b63b-2d92-46ee-a6a0-2602dbafdffa {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.625026] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bac44a6-0b43-4234-99a9-3d3d5df56a4d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.638703] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6bf3cdf-9d67-4564-bd05-e77fc2562984 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.655680] env[62627]: DEBUG nova.compute.provider_tree [None req-fd2d46c3-107d-480c-9d9b-a5a305c61bfc tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 804.738802] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194167, 'name': CreateVM_Task, 'duration_secs': 0.382822} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.738964] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 16c4065b-83cf-4b1c-92bf-66ce0a75e573] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 804.739436] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 804.739599] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 804.739993] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 804.740273] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-60bcff44-33d0-4626-aa6b-69966ee0e14c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.747895] env[62627]: DEBUG oslo_vmware.api [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Waiting for the task: (returnval){ [ 804.747895] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52f72c52-7c2c-11cf-b1f9-62e3ea483f89" [ 804.747895] env[62627]: _type = "Task" [ 804.747895] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.758813] env[62627]: DEBUG oslo_vmware.api [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52f72c52-7c2c-11cf-b1f9-62e3ea483f89, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.017982] env[62627]: DEBUG oslo_vmware.api [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2194168, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.163320] env[62627]: DEBUG nova.scheduler.client.report [None req-fd2d46c3-107d-480c-9d9b-a5a305c61bfc tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 805.184347] env[62627]: DEBUG nova.compute.manager [req-49eec9cc-624e-469c-8c16-62a584132069 req-d394c4bb-4937-42c9-8ee7-3610d6ff7515 service nova] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Received event network-changed-5db92929-a96d-41c2-be2b-95c9ef62c4bf {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 805.184570] env[62627]: DEBUG nova.compute.manager [req-49eec9cc-624e-469c-8c16-62a584132069 req-d394c4bb-4937-42c9-8ee7-3610d6ff7515 service nova] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Refreshing instance network info cache due to event network-changed-5db92929-a96d-41c2-be2b-95c9ef62c4bf. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 805.184693] env[62627]: DEBUG oslo_concurrency.lockutils [req-49eec9cc-624e-469c-8c16-62a584132069 req-d394c4bb-4937-42c9-8ee7-3610d6ff7515 service nova] Acquiring lock "refresh_cache-faa16f10-63f0-457c-8cd7-20a624454d86" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 805.184834] env[62627]: DEBUG oslo_concurrency.lockutils [req-49eec9cc-624e-469c-8c16-62a584132069 req-d394c4bb-4937-42c9-8ee7-3610d6ff7515 service nova] Acquired lock "refresh_cache-faa16f10-63f0-457c-8cd7-20a624454d86" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 805.184991] env[62627]: DEBUG nova.network.neutron [req-49eec9cc-624e-469c-8c16-62a584132069 req-d394c4bb-4937-42c9-8ee7-3610d6ff7515 service nova] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Refreshing network info cache for port 5db92929-a96d-41c2-be2b-95c9ef62c4bf {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 805.261270] env[62627]: DEBUG oslo_vmware.api [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52f72c52-7c2c-11cf-b1f9-62e3ea483f89, 'name': SearchDatastore_Task, 'duration_secs': 0.013744} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.262098] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 805.262346] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] [instance: 16c4065b-83cf-4b1c-92bf-66ce0a75e573] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 805.262575] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 805.262722] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 805.263572] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 805.263572] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-353743f3-3f5a-413d-85d3-3ab9e8b1b1c3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.273174] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 805.273369] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 805.274145] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-02eb0d5a-b3ee-492d-9e78-d8a113257eb4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.280284] env[62627]: DEBUG oslo_vmware.api [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Waiting for the task: (returnval){ [ 805.280284] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]525cc99c-8f2a-7e97-0163-421550527a98" [ 805.280284] env[62627]: _type = "Task" [ 805.280284] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.289336] env[62627]: DEBUG oslo_vmware.api [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]525cc99c-8f2a-7e97-0163-421550527a98, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.320573] env[62627]: DEBUG nova.compute.manager [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: b1ba58c1-340f-486a-8f47-86fbc7b27485] Start spawning the instance on the hypervisor. {{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 805.355911] env[62627]: DEBUG nova.virt.hardware [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 805.356497] env[62627]: DEBUG nova.virt.hardware [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 805.356497] env[62627]: DEBUG nova.virt.hardware [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 805.356497] env[62627]: DEBUG nova.virt.hardware [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 
tempest-ServersAdminTestJSON-158660873-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 805.356663] env[62627]: DEBUG nova.virt.hardware [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 805.356768] env[62627]: DEBUG nova.virt.hardware [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 805.356979] env[62627]: DEBUG nova.virt.hardware [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 805.361021] env[62627]: DEBUG nova.virt.hardware [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 805.361021] env[62627]: DEBUG nova.virt.hardware [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 805.361021] env[62627]: DEBUG nova.virt.hardware [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 805.361021] env[62627]: DEBUG nova.virt.hardware [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 805.361021] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-507c2902-dba5-4f91-869b-e585070ca69c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.372814] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75aaf7f6-bb8a-4ed1-9171-cfd491847397 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.493828] env[62627]: DEBUG nova.network.neutron [req-436a2b1d-43ab-4bf7-b86c-7079bf626944 req-363f2fe1-772c-4d4e-821e-160fe50a9512 service nova] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Updated VIF entry in instance network info cache for port 21d30d49-10fb-462d-9b5f-7525c3497178. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 805.493828] env[62627]: DEBUG nova.network.neutron [req-436a2b1d-43ab-4bf7-b86c-7079bf626944 req-363f2fe1-772c-4d4e-821e-160fe50a9512 service nova] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Updating instance_info_cache with network_info: [{"id": "21d30d49-10fb-462d-9b5f-7525c3497178", "address": "fa:16:3e:9b:65:45", "network": {"id": "a1c5c227-8f1f-4881-a6d8-f446b64d9846", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1052922661-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e4f7ad3ebcf4c4d97bdbce58eec5a09", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "023d6500-887e-4dc4-bec5-06b40450d9c0", "external-id": "nsx-vlan-transportzone-108", "segmentation_id": 108, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap21d30d49-10", "ovs_interfaceid": "21d30d49-10fb-462d-9b5f-7525c3497178", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 805.513273] env[62627]: DEBUG oslo_vmware.api [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2194168, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.65977} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.513566] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore1] a12855e2-1544-4e85-a7ff-c026fec861b6/a12855e2-1544-4e85-a7ff-c026fec861b6.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 805.513997] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 805.514081] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7a0f4ecb-0a4e-4cce-898e-17f96a4f867d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.518212] env[62627]: DEBUG nova.compute.manager [req-58312547-be2d-406b-8615-d534707b72a8 req-21f2018d-76ab-46bc-8624-0d60d7673591 service nova] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Received event network-changed-4cb3b0a3-c2f5-4b36-a75a-b6d3b5e59dc3 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 805.518404] env[62627]: DEBUG nova.compute.manager [req-58312547-be2d-406b-8615-d534707b72a8 req-21f2018d-76ab-46bc-8624-0d60d7673591 service nova] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Refreshing instance network info cache due to event network-changed-4cb3b0a3-c2f5-4b36-a75a-b6d3b5e59dc3. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 805.518534] env[62627]: DEBUG oslo_concurrency.lockutils [req-58312547-be2d-406b-8615-d534707b72a8 req-21f2018d-76ab-46bc-8624-0d60d7673591 service nova] Acquiring lock "refresh_cache-fffa23fe-3ce2-4879-b72d-11275d298504" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 805.518618] env[62627]: DEBUG oslo_concurrency.lockutils [req-58312547-be2d-406b-8615-d534707b72a8 req-21f2018d-76ab-46bc-8624-0d60d7673591 service nova] Acquired lock "refresh_cache-fffa23fe-3ce2-4879-b72d-11275d298504" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 805.518769] env[62627]: DEBUG nova.network.neutron [req-58312547-be2d-406b-8615-d534707b72a8 req-21f2018d-76ab-46bc-8624-0d60d7673591 service nova] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Refreshing network info cache for port 4cb3b0a3-c2f5-4b36-a75a-b6d3b5e59dc3 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 805.528118] env[62627]: DEBUG oslo_vmware.api [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Waiting for the task: (returnval){ [ 805.528118] env[62627]: value = "task-2194169" [ 805.528118] env[62627]: _type = "Task" [ 805.528118] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.538501] env[62627]: DEBUG oslo_vmware.api [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2194169, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.791696] env[62627]: DEBUG oslo_vmware.api [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]525cc99c-8f2a-7e97-0163-421550527a98, 'name': SearchDatastore_Task, 'duration_secs': 0.014314} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.792507] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1fe57f7b-c68f-405d-bb80-9709a7a619a0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.799658] env[62627]: DEBUG oslo_vmware.api [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Waiting for the task: (returnval){ [ 805.799658] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5234b7ea-97b9-17a6-9234-8a55d2223dd1" [ 805.799658] env[62627]: _type = "Task" [ 805.799658] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.812059] env[62627]: DEBUG oslo_vmware.api [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5234b7ea-97b9-17a6-9234-8a55d2223dd1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.916404] env[62627]: DEBUG nova.network.neutron [-] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 805.946317] env[62627]: DEBUG nova.network.neutron [-] [instance: 77418dff-6bdc-4e0a-954a-45c80960dbf1] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 805.997377] env[62627]: DEBUG oslo_concurrency.lockutils [req-436a2b1d-43ab-4bf7-b86c-7079bf626944 req-363f2fe1-772c-4d4e-821e-160fe50a9512 service nova] Releasing lock "refresh_cache-a12855e2-1544-4e85-a7ff-c026fec861b6" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 805.997799] env[62627]: DEBUG nova.compute.manager [req-436a2b1d-43ab-4bf7-b86c-7079bf626944 req-363f2fe1-772c-4d4e-821e-160fe50a9512 service nova] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Received event network-changed-bbeb1b12-c6f2-470f-a952-37da5b4db0c9 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 805.998047] env[62627]: DEBUG nova.compute.manager [req-436a2b1d-43ab-4bf7-b86c-7079bf626944 req-363f2fe1-772c-4d4e-821e-160fe50a9512 service nova] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Refreshing instance network info cache due to event network-changed-bbeb1b12-c6f2-470f-a952-37da5b4db0c9. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 805.998261] env[62627]: DEBUG oslo_concurrency.lockutils [req-436a2b1d-43ab-4bf7-b86c-7079bf626944 req-363f2fe1-772c-4d4e-821e-160fe50a9512 service nova] Acquiring lock "refresh_cache-2cfcbc3b-c2b9-4155-a48a-0e73eea0589e" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 805.998423] env[62627]: DEBUG oslo_concurrency.lockutils [req-436a2b1d-43ab-4bf7-b86c-7079bf626944 req-363f2fe1-772c-4d4e-821e-160fe50a9512 service nova] Acquired lock "refresh_cache-2cfcbc3b-c2b9-4155-a48a-0e73eea0589e" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 805.998667] env[62627]: DEBUG nova.network.neutron [req-436a2b1d-43ab-4bf7-b86c-7079bf626944 req-363f2fe1-772c-4d4e-821e-160fe50a9512 service nova] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Refreshing network info cache for port bbeb1b12-c6f2-470f-a952-37da5b4db0c9 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 806.045059] env[62627]: DEBUG oslo_vmware.api [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2194169, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072469} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.045059] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 806.045059] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41ebeea6-4761-40ef-9d37-dd59d334fc4f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.077526] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Reconfiguring VM instance instance-0000001a to attach disk [datastore1] a12855e2-1544-4e85-a7ff-c026fec861b6/a12855e2-1544-4e85-a7ff-c026fec861b6.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 806.077908] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-922dc93d-e26f-40d8-abe5-13312764d7de {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.103817] env[62627]: DEBUG oslo_vmware.api [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Waiting for the task: (returnval){ [ 806.103817] env[62627]: value = "task-2194170" [ 806.103817] env[62627]: _type = "Task" [ 806.103817] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.115430] env[62627]: DEBUG oslo_vmware.api [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2194170, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.142789] env[62627]: DEBUG nova.network.neutron [req-49eec9cc-624e-469c-8c16-62a584132069 req-d394c4bb-4937-42c9-8ee7-3610d6ff7515 service nova] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Updated VIF entry in instance network info cache for port 5db92929-a96d-41c2-be2b-95c9ef62c4bf. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 806.142789] env[62627]: DEBUG nova.network.neutron [req-49eec9cc-624e-469c-8c16-62a584132069 req-d394c4bb-4937-42c9-8ee7-3610d6ff7515 service nova] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Updating instance_info_cache with network_info: [{"id": "5db92929-a96d-41c2-be2b-95c9ef62c4bf", "address": "fa:16:3e:c2:49:86", "network": {"id": "c9ffbe6a-63fb-4479-bc68-6209e534cbe0", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-213656681-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.182", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9d36f0395b094ef4aa913b538a31f6a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d33839ae-40ca-471b-92e3-eb282b920682", "external-id": "nsx-vlan-transportzone-416", "segmentation_id": 416, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5db92929-a9", "ovs_interfaceid": "5db92929-a96d-41c2-be2b-95c9ef62c4bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 806.180671] env[62627]: DEBUG oslo_concurrency.lockutils [None req-fd2d46c3-107d-480c-9d9b-a5a305c61bfc tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 3.031s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 806.184589] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.130s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 806.186027] env[62627]: INFO nova.compute.claims [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 806.313494] env[62627]: DEBUG oslo_vmware.api [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5234b7ea-97b9-17a6-9234-8a55d2223dd1, 'name': SearchDatastore_Task, 'duration_secs': 0.016148} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.313798] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 806.314069] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 16c4065b-83cf-4b1c-92bf-66ce0a75e573/16c4065b-83cf-4b1c-92bf-66ce0a75e573.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 806.317317] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-64ba0fec-63ad-4168-954f-e18278ab00ec {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.327084] env[62627]: DEBUG oslo_vmware.api [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Waiting for the task: (returnval){ [ 806.327084] env[62627]: value = "task-2194171" [ 806.327084] env[62627]: _type = "Task" [ 806.327084] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.337032] env[62627]: DEBUG oslo_vmware.api [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Task: {'id': task-2194171, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.419094] env[62627]: INFO nova.compute.manager [-] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Took 1.95 seconds to deallocate network for instance. [ 806.448719] env[62627]: INFO nova.compute.manager [-] [instance: 77418dff-6bdc-4e0a-954a-45c80960dbf1] Took 1.91 seconds to deallocate network for instance. [ 806.508080] env[62627]: DEBUG nova.network.neutron [req-58312547-be2d-406b-8615-d534707b72a8 req-21f2018d-76ab-46bc-8624-0d60d7673591 service nova] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Updated VIF entry in instance network info cache for port 4cb3b0a3-c2f5-4b36-a75a-b6d3b5e59dc3. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 806.508482] env[62627]: DEBUG nova.network.neutron [req-58312547-be2d-406b-8615-d534707b72a8 req-21f2018d-76ab-46bc-8624-0d60d7673591 service nova] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Updating instance_info_cache with network_info: [{"id": "4cb3b0a3-c2f5-4b36-a75a-b6d3b5e59dc3", "address": "fa:16:3e:e4:4d:b6", "network": {"id": "13960831-02bf-4e73-b792-efe56ca391d7", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-864734567-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.179", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "4edac85f0a824e4798b50cf27575dfdb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee20e439-fed9-490e-97dd-f3c886977ae1", "external-id": "nsx-vlan-transportzone-357", "segmentation_id": 357, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4cb3b0a3-c2", "ovs_interfaceid": "4cb3b0a3-c2f5-4b36-a75a-b6d3b5e59dc3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 806.544305] env[62627]: DEBUG nova.network.neutron [req-436a2b1d-43ab-4bf7-b86c-7079bf626944 req-363f2fe1-772c-4d4e-821e-160fe50a9512 service nova] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 806.620106] env[62627]: DEBUG oslo_vmware.api [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2194170, 'name': ReconfigVM_Task, 'duration_secs': 0.492733} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.620106] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Reconfigured VM instance instance-0000001a to attach disk [datastore1] a12855e2-1544-4e85-a7ff-c026fec861b6/a12855e2-1544-4e85-a7ff-c026fec861b6.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 806.621233] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0b2c5c59-cfaa-47d1-a18d-67541c30cdf5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.631272] env[62627]: DEBUG oslo_vmware.api [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Waiting for the task: (returnval){ [ 806.631272] env[62627]: value = "task-2194172" [ 806.631272] env[62627]: _type = "Task" [ 806.631272] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.643235] env[62627]: DEBUG oslo_vmware.api [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2194172, 'name': Rename_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.647251] env[62627]: DEBUG oslo_concurrency.lockutils [req-49eec9cc-624e-469c-8c16-62a584132069 req-d394c4bb-4937-42c9-8ee7-3610d6ff7515 service nova] Releasing lock "refresh_cache-faa16f10-63f0-457c-8cd7-20a624454d86" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 806.647251] env[62627]: DEBUG nova.compute.manager [req-49eec9cc-624e-469c-8c16-62a584132069 req-d394c4bb-4937-42c9-8ee7-3610d6ff7515 service nova] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Received event network-changed-246fd9bd-81de-4162-b744-58c17169a80f {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 806.647251] env[62627]: DEBUG nova.compute.manager [req-49eec9cc-624e-469c-8c16-62a584132069 req-d394c4bb-4937-42c9-8ee7-3610d6ff7515 service nova] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Refreshing instance network info cache due to event network-changed-246fd9bd-81de-4162-b744-58c17169a80f. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 806.647364] env[62627]: DEBUG oslo_concurrency.lockutils [req-49eec9cc-624e-469c-8c16-62a584132069 req-d394c4bb-4937-42c9-8ee7-3610d6ff7515 service nova] Acquiring lock "refresh_cache-a674db15-ea4e-44c8-abc1-6af425286517" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 806.647668] env[62627]: DEBUG oslo_concurrency.lockutils [req-49eec9cc-624e-469c-8c16-62a584132069 req-d394c4bb-4937-42c9-8ee7-3610d6ff7515 service nova] Acquired lock "refresh_cache-a674db15-ea4e-44c8-abc1-6af425286517" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 806.648087] env[62627]: DEBUG nova.network.neutron [req-49eec9cc-624e-469c-8c16-62a584132069 req-d394c4bb-4937-42c9-8ee7-3610d6ff7515 service nova] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Refreshing network info cache for port 246fd9bd-81de-4162-b744-58c17169a80f {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 806.734252] env[62627]: DEBUG nova.network.neutron [req-436a2b1d-43ab-4bf7-b86c-7079bf626944 req-363f2fe1-772c-4d4e-821e-160fe50a9512 service nova] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 806.775489] env[62627]: INFO nova.scheduler.client.report [None req-fd2d46c3-107d-480c-9d9b-a5a305c61bfc tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Deleted allocation for migration ee86539f-a2cf-4a4f-a1a2-80c788c57dd2 [ 806.841748] env[62627]: DEBUG oslo_vmware.api [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Task: {'id': task-2194171, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.927965] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d866bf92-8bfa-4630-9c60-2c8068361dd4 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 806.958443] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2e38d84a-9513-4503-805a-6b92d68b7e43 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 807.012894] env[62627]: DEBUG oslo_concurrency.lockutils [req-58312547-be2d-406b-8615-d534707b72a8 req-21f2018d-76ab-46bc-8624-0d60d7673591 service nova] Releasing lock "refresh_cache-fffa23fe-3ce2-4879-b72d-11275d298504" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 807.142773] env[62627]: DEBUG oslo_vmware.api [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2194172, 'name': Rename_Task, 'duration_secs': 0.216309} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.143115] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 807.143304] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cf89dcf0-9142-43ec-abc3-32ed7e3642c0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.150366] env[62627]: DEBUG oslo_vmware.api [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Waiting for the task: (returnval){ [ 807.150366] env[62627]: value = "task-2194173" [ 807.150366] env[62627]: _type = "Task" [ 807.150366] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.160457] env[62627]: DEBUG oslo_vmware.api [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2194173, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.239612] env[62627]: DEBUG oslo_concurrency.lockutils [req-436a2b1d-43ab-4bf7-b86c-7079bf626944 req-363f2fe1-772c-4d4e-821e-160fe50a9512 service nova] Releasing lock "refresh_cache-2cfcbc3b-c2b9-4155-a48a-0e73eea0589e" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 807.264556] env[62627]: DEBUG nova.network.neutron [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: b1ba58c1-340f-486a-8f47-86fbc7b27485] Successfully updated port: 85865adb-92d3-4f6e-a7af-6ddd191d13d3 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 807.286762] env[62627]: DEBUG oslo_concurrency.lockutils [None req-fd2d46c3-107d-480c-9d9b-a5a305c61bfc tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Lock "ab999825-4310-4fd8-81f0-6bb5dde4175b" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 30.623s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 807.338839] env[62627]: DEBUG oslo_vmware.api [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Task: {'id': task-2194171, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.656713} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.339099] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 16c4065b-83cf-4b1c-92bf-66ce0a75e573/16c4065b-83cf-4b1c-92bf-66ce0a75e573.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 807.339262] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] [instance: 16c4065b-83cf-4b1c-92bf-66ce0a75e573] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 807.341978] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c0a4b968-19ee-43b0-bdc2-717eca2623ca {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.349177] env[62627]: DEBUG oslo_vmware.api [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Waiting for the task: (returnval){ [ 807.349177] env[62627]: value = "task-2194174" [ 807.349177] env[62627]: _type = "Task" [ 807.349177] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.361988] env[62627]: DEBUG oslo_vmware.api [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Task: {'id': task-2194174, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.494858] env[62627]: DEBUG nova.network.neutron [req-49eec9cc-624e-469c-8c16-62a584132069 req-d394c4bb-4937-42c9-8ee7-3610d6ff7515 service nova] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Updated VIF entry in instance network info cache for port 246fd9bd-81de-4162-b744-58c17169a80f. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 807.495296] env[62627]: DEBUG nova.network.neutron [req-49eec9cc-624e-469c-8c16-62a584132069 req-d394c4bb-4937-42c9-8ee7-3610d6ff7515 service nova] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Updating instance_info_cache with network_info: [{"id": "246fd9bd-81de-4162-b744-58c17169a80f", "address": "fa:16:3e:9e:4a:5e", "network": {"id": "5d65934a-2966-4094-bc55-817726caaea5", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1246247596-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b12d6c9026644c9cb45d687b92161121", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap246fd9bd-81", "ovs_interfaceid": "246fd9bd-81de-4162-b744-58c17169a80f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 807.572498] env[62627]: DEBUG nova.compute.manager [req-1cade3b7-b557-4bd3-a728-3ed68b5c709f req-4fe06b77-2492-4713-8fd3-59d3fb76c719 service nova] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Received event network-changed-4cb3b0a3-c2f5-4b36-a75a-b6d3b5e59dc3 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 807.572749] env[62627]: DEBUG nova.compute.manager [req-1cade3b7-b557-4bd3-a728-3ed68b5c709f req-4fe06b77-2492-4713-8fd3-59d3fb76c719 service nova] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Refreshing instance network info cache due to event network-changed-4cb3b0a3-c2f5-4b36-a75a-b6d3b5e59dc3. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 807.573011] env[62627]: DEBUG oslo_concurrency.lockutils [req-1cade3b7-b557-4bd3-a728-3ed68b5c709f req-4fe06b77-2492-4713-8fd3-59d3fb76c719 service nova] Acquiring lock "refresh_cache-fffa23fe-3ce2-4879-b72d-11275d298504" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 807.573220] env[62627]: DEBUG oslo_concurrency.lockutils [req-1cade3b7-b557-4bd3-a728-3ed68b5c709f req-4fe06b77-2492-4713-8fd3-59d3fb76c719 service nova] Acquired lock "refresh_cache-fffa23fe-3ce2-4879-b72d-11275d298504" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 807.574475] env[62627]: DEBUG nova.network.neutron [req-1cade3b7-b557-4bd3-a728-3ed68b5c709f req-4fe06b77-2492-4713-8fd3-59d3fb76c719 service nova] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Refreshing network info cache for port 4cb3b0a3-c2f5-4b36-a75a-b6d3b5e59dc3 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 807.663263] env[62627]: DEBUG oslo_vmware.api [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2194173, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.770853] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Acquiring lock "refresh_cache-b1ba58c1-340f-486a-8f47-86fbc7b27485" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 807.771112] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Acquired lock "refresh_cache-b1ba58c1-340f-486a-8f47-86fbc7b27485" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 807.771248] env[62627]: DEBUG nova.network.neutron [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: b1ba58c1-340f-486a-8f47-86fbc7b27485] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 807.794548] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4f24eb7-a736-4708-9824-1d5fe103aadb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.803578] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-135e5898-b6f1-40ff-b202-a302393d73a0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.846562] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-805af320-322c-4a1b-9ac3-77096ce5d9bc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.856359] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-0ff58894-aa3f-4c9c-92c5-32182c9fbf4c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.865698] env[62627]: DEBUG oslo_vmware.api [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Task: {'id': task-2194174, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069473} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.873893] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] [instance: 16c4065b-83cf-4b1c-92bf-66ce0a75e573] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 807.875009] env[62627]: DEBUG nova.compute.provider_tree [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 807.877951] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47f02b17-d716-44c9-bb5b-1f63e7e2be96 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.902250] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] [instance: 16c4065b-83cf-4b1c-92bf-66ce0a75e573] Reconfiguring VM instance instance-00000019 to attach disk [datastore2] 16c4065b-83cf-4b1c-92bf-66ce0a75e573/16c4065b-83cf-4b1c-92bf-66ce0a75e573.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 807.902250] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cd9c6db2-3bc4-4daa-9517-feb8af05df63 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.924144] env[62627]: DEBUG oslo_vmware.api [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Waiting for the task: (returnval){ [ 807.924144] env[62627]: value = "task-2194175" [ 807.924144] env[62627]: _type = "Task" [ 807.924144] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.931723] env[62627]: DEBUG nova.compute.manager [req-9d517dfa-8142-4373-9f60-e2b205da47d7 req-cdfeca1f-86ab-48ec-a9ca-4c2306ec6401 service nova] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Received event network-vif-deleted-bbeb1b12-c6f2-470f-a952-37da5b4db0c9 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 807.931931] env[62627]: DEBUG nova.compute.manager [req-9d517dfa-8142-4373-9f60-e2b205da47d7 req-cdfeca1f-86ab-48ec-a9ca-4c2306ec6401 service nova] [instance: 77418dff-6bdc-4e0a-954a-45c80960dbf1] Received event network-vif-deleted-79ffbb02-e48b-42f6-a1c3-8f9a38f02aa7 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 807.932111] env[62627]: DEBUG nova.compute.manager [req-9d517dfa-8142-4373-9f60-e2b205da47d7 req-cdfeca1f-86ab-48ec-a9ca-4c2306ec6401 service nova] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Received event network-changed-246fd9bd-81de-4162-b744-58c17169a80f {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 807.932269] env[62627]: DEBUG nova.compute.manager [req-9d517dfa-8142-4373-9f60-e2b205da47d7 req-cdfeca1f-86ab-48ec-a9ca-4c2306ec6401 service nova] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Refreshing instance network info cache due to event network-changed-246fd9bd-81de-4162-b744-58c17169a80f. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 807.932446] env[62627]: DEBUG oslo_concurrency.lockutils [req-9d517dfa-8142-4373-9f60-e2b205da47d7 req-cdfeca1f-86ab-48ec-a9ca-4c2306ec6401 service nova] Acquiring lock "refresh_cache-a674db15-ea4e-44c8-abc1-6af425286517" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 807.941134] env[62627]: DEBUG oslo_vmware.api [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Task: {'id': task-2194175, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.000510] env[62627]: DEBUG oslo_concurrency.lockutils [req-49eec9cc-624e-469c-8c16-62a584132069 req-d394c4bb-4937-42c9-8ee7-3610d6ff7515 service nova] Releasing lock "refresh_cache-a674db15-ea4e-44c8-abc1-6af425286517" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 808.001102] env[62627]: DEBUG oslo_concurrency.lockutils [req-9d517dfa-8142-4373-9f60-e2b205da47d7 req-cdfeca1f-86ab-48ec-a9ca-4c2306ec6401 service nova] Acquired lock "refresh_cache-a674db15-ea4e-44c8-abc1-6af425286517" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 808.001154] env[62627]: DEBUG nova.network.neutron [req-9d517dfa-8142-4373-9f60-e2b205da47d7 req-cdfeca1f-86ab-48ec-a9ca-4c2306ec6401 service nova] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Refreshing network info cache for port 246fd9bd-81de-4162-b744-58c17169a80f {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 808.046413] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3f3bedf8-9882-47c1-90d9-3393199d40ad tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Acquiring lock "2cfcbc3b-c2b9-4155-a48a-0e73eea0589e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 808.161095] env[62627]: DEBUG oslo_vmware.api [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2194173, 'name': PowerOnVM_Task, 'duration_secs': 0.779135} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.161523] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 808.161523] env[62627]: INFO nova.compute.manager [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Took 8.87 seconds to spawn the instance on the hypervisor. [ 808.161997] env[62627]: DEBUG nova.compute.manager [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 808.162516] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfc407f4-19da-498a-8374-77067c4c4035 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.350053] env[62627]: DEBUG nova.network.neutron [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: b1ba58c1-340f-486a-8f47-86fbc7b27485] Instance cache missing network info. 
{{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 808.383684] env[62627]: DEBUG nova.scheduler.client.report [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 808.442543] env[62627]: DEBUG oslo_vmware.api [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Task: {'id': task-2194175, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.476061] env[62627]: DEBUG nova.network.neutron [req-1cade3b7-b557-4bd3-a728-3ed68b5c709f req-4fe06b77-2492-4713-8fd3-59d3fb76c719 service nova] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Updated VIF entry in instance network info cache for port 4cb3b0a3-c2f5-4b36-a75a-b6d3b5e59dc3. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 808.476422] env[62627]: DEBUG nova.network.neutron [req-1cade3b7-b557-4bd3-a728-3ed68b5c709f req-4fe06b77-2492-4713-8fd3-59d3fb76c719 service nova] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Updating instance_info_cache with network_info: [{"id": "4cb3b0a3-c2f5-4b36-a75a-b6d3b5e59dc3", "address": "fa:16:3e:e4:4d:b6", "network": {"id": "13960831-02bf-4e73-b792-efe56ca391d7", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-864734567-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "4edac85f0a824e4798b50cf27575dfdb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee20e439-fed9-490e-97dd-f3c886977ae1", "external-id": "nsx-vlan-transportzone-357", "segmentation_id": 357, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4cb3b0a3-c2", "ovs_interfaceid": "4cb3b0a3-c2f5-4b36-a75a-b6d3b5e59dc3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 808.682858] env[62627]: INFO nova.compute.manager [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Took 36.52 seconds to build instance. 
[ 808.759450] env[62627]: DEBUG nova.network.neutron [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: b1ba58c1-340f-486a-8f47-86fbc7b27485] Updating instance_info_cache with network_info: [{"id": "85865adb-92d3-4f6e-a7af-6ddd191d13d3", "address": "fa:16:3e:90:63:cd", "network": {"id": "4e8b6953-adb2-4977-8315-179e01e807e0", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1952492535-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14238d8125674b63a7cf4452cb260283", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2907cce-d529-4809-af05-d29397bed211", "external-id": "nsx-vlan-transportzone-427", "segmentation_id": 427, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap85865adb-92", "ovs_interfaceid": "85865adb-92d3-4f6e-a7af-6ddd191d13d3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 808.890731] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.707s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 808.891764] env[62627]: DEBUG nova.compute.manager [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 808.898616] env[62627]: DEBUG oslo_concurrency.lockutils [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.790s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 808.903267] env[62627]: INFO nova.compute.claims [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: ea289de9-9b2d-4756-a9a5-0e3dfe8db772] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 808.934037] env[62627]: DEBUG nova.network.neutron [req-9d517dfa-8142-4373-9f60-e2b205da47d7 req-cdfeca1f-86ab-48ec-a9ca-4c2306ec6401 service nova] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Updated VIF entry in instance network info cache for port 246fd9bd-81de-4162-b744-58c17169a80f. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 808.934037] env[62627]: DEBUG nova.network.neutron [req-9d517dfa-8142-4373-9f60-e2b205da47d7 req-cdfeca1f-86ab-48ec-a9ca-4c2306ec6401 service nova] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Updating instance_info_cache with network_info: [{"id": "246fd9bd-81de-4162-b744-58c17169a80f", "address": "fa:16:3e:9e:4a:5e", "network": {"id": "5d65934a-2966-4094-bc55-817726caaea5", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1246247596-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b12d6c9026644c9cb45d687b92161121", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap246fd9bd-81", "ovs_interfaceid": "246fd9bd-81de-4162-b744-58c17169a80f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 808.938438] env[62627]: DEBUG oslo_vmware.api [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Task: {'id': task-2194175, 'name': ReconfigVM_Task, 'duration_secs': 0.553718} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.943705] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] [instance: 16c4065b-83cf-4b1c-92bf-66ce0a75e573] Reconfigured VM instance instance-00000019 to attach disk [datastore2] 16c4065b-83cf-4b1c-92bf-66ce0a75e573/16c4065b-83cf-4b1c-92bf-66ce0a75e573.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 808.943705] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9363df35-1fa2-45e8-a74f-31936a7b2cdc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.950727] env[62627]: DEBUG oslo_vmware.api [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Waiting for the task: (returnval){ [ 808.950727] env[62627]: value = "task-2194176" [ 808.950727] env[62627]: _type = "Task" [ 808.950727] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.963451] env[62627]: DEBUG oslo_vmware.api [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Task: {'id': task-2194176, 'name': Rename_Task} progress is 6%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.979142] env[62627]: DEBUG oslo_concurrency.lockutils [req-1cade3b7-b557-4bd3-a728-3ed68b5c709f req-4fe06b77-2492-4713-8fd3-59d3fb76c719 service nova] Releasing lock "refresh_cache-fffa23fe-3ce2-4879-b72d-11275d298504" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 809.186764] env[62627]: DEBUG oslo_concurrency.lockutils [None req-be72c443-c175-4ec4-ae24-4a7654e939c0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lock "a12855e2-1544-4e85-a7ff-c026fec861b6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 63.477s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 809.261920] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Releasing lock "refresh_cache-b1ba58c1-340f-486a-8f47-86fbc7b27485" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 809.262347] env[62627]: DEBUG nova.compute.manager [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: b1ba58c1-340f-486a-8f47-86fbc7b27485] Instance network_info: |[{"id": "85865adb-92d3-4f6e-a7af-6ddd191d13d3", "address": "fa:16:3e:90:63:cd", "network": {"id": "4e8b6953-adb2-4977-8315-179e01e807e0", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1952492535-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14238d8125674b63a7cf4452cb260283", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2907cce-d529-4809-af05-d29397bed211", "external-id": "nsx-vlan-transportzone-427", "segmentation_id": 427, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap85865adb-92", "ovs_interfaceid": "85865adb-92d3-4f6e-a7af-6ddd191d13d3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 809.262872] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: b1ba58c1-340f-486a-8f47-86fbc7b27485] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:90:63:cd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b2907cce-d529-4809-af05-d29397bed211', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '85865adb-92d3-4f6e-a7af-6ddd191d13d3', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 809.270844] env[62627]: DEBUG oslo.service.loopingcall [None 
req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 809.271548] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b1ba58c1-340f-486a-8f47-86fbc7b27485] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 809.271835] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4e50e4d1-e8fa-4096-83e1-020a75a161f9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.291997] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 809.291997] env[62627]: value = "task-2194177" [ 809.291997] env[62627]: _type = "Task" [ 809.291997] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.302648] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194177, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.413965] env[62627]: DEBUG nova.compute.utils [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 809.416699] env[62627]: DEBUG nova.compute.manager [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Allocating IP information in the background. 
{{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 809.416881] env[62627]: DEBUG nova.network.neutron [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 809.440114] env[62627]: DEBUG oslo_concurrency.lockutils [req-9d517dfa-8142-4373-9f60-e2b205da47d7 req-cdfeca1f-86ab-48ec-a9ca-4c2306ec6401 service nova] Releasing lock "refresh_cache-a674db15-ea4e-44c8-abc1-6af425286517" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 809.440114] env[62627]: DEBUG nova.compute.manager [req-9d517dfa-8142-4373-9f60-e2b205da47d7 req-cdfeca1f-86ab-48ec-a9ca-4c2306ec6401 service nova] [instance: b1ba58c1-340f-486a-8f47-86fbc7b27485] Received event network-vif-plugged-85865adb-92d3-4f6e-a7af-6ddd191d13d3 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 809.440114] env[62627]: DEBUG oslo_concurrency.lockutils [req-9d517dfa-8142-4373-9f60-e2b205da47d7 req-cdfeca1f-86ab-48ec-a9ca-4c2306ec6401 service nova] Acquiring lock "b1ba58c1-340f-486a-8f47-86fbc7b27485-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 809.440396] env[62627]: DEBUG oslo_concurrency.lockutils [req-9d517dfa-8142-4373-9f60-e2b205da47d7 req-cdfeca1f-86ab-48ec-a9ca-4c2306ec6401 service nova] Lock "b1ba58c1-340f-486a-8f47-86fbc7b27485-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 809.440485] env[62627]: DEBUG oslo_concurrency.lockutils [req-9d517dfa-8142-4373-9f60-e2b205da47d7 req-cdfeca1f-86ab-48ec-a9ca-4c2306ec6401 service nova] Lock "b1ba58c1-340f-486a-8f47-86fbc7b27485-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 809.440668] env[62627]: DEBUG nova.compute.manager [req-9d517dfa-8142-4373-9f60-e2b205da47d7 req-cdfeca1f-86ab-48ec-a9ca-4c2306ec6401 service nova] [instance: b1ba58c1-340f-486a-8f47-86fbc7b27485] No waiting events found dispatching network-vif-plugged-85865adb-92d3-4f6e-a7af-6ddd191d13d3 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 809.440839] env[62627]: WARNING nova.compute.manager [req-9d517dfa-8142-4373-9f60-e2b205da47d7 req-cdfeca1f-86ab-48ec-a9ca-4c2306ec6401 service nova] [instance: b1ba58c1-340f-486a-8f47-86fbc7b27485] Received unexpected event network-vif-plugged-85865adb-92d3-4f6e-a7af-6ddd191d13d3 for instance with vm_state building and task_state spawning. 
[ 809.441071] env[62627]: DEBUG nova.compute.manager [req-9d517dfa-8142-4373-9f60-e2b205da47d7 req-cdfeca1f-86ab-48ec-a9ca-4c2306ec6401 service nova] [instance: b1ba58c1-340f-486a-8f47-86fbc7b27485] Received event network-changed-85865adb-92d3-4f6e-a7af-6ddd191d13d3 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 809.441263] env[62627]: DEBUG nova.compute.manager [req-9d517dfa-8142-4373-9f60-e2b205da47d7 req-cdfeca1f-86ab-48ec-a9ca-4c2306ec6401 service nova] [instance: b1ba58c1-340f-486a-8f47-86fbc7b27485] Refreshing instance network info cache due to event network-changed-85865adb-92d3-4f6e-a7af-6ddd191d13d3. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 809.441575] env[62627]: DEBUG oslo_concurrency.lockutils [req-9d517dfa-8142-4373-9f60-e2b205da47d7 req-cdfeca1f-86ab-48ec-a9ca-4c2306ec6401 service nova] Acquiring lock "refresh_cache-b1ba58c1-340f-486a-8f47-86fbc7b27485" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 809.441646] env[62627]: DEBUG oslo_concurrency.lockutils [req-9d517dfa-8142-4373-9f60-e2b205da47d7 req-cdfeca1f-86ab-48ec-a9ca-4c2306ec6401 service nova] Acquired lock "refresh_cache-b1ba58c1-340f-486a-8f47-86fbc7b27485" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 809.441861] env[62627]: DEBUG nova.network.neutron [req-9d517dfa-8142-4373-9f60-e2b205da47d7 req-cdfeca1f-86ab-48ec-a9ca-4c2306ec6401 service nova] [instance: b1ba58c1-340f-486a-8f47-86fbc7b27485] Refreshing network info cache for port 85865adb-92d3-4f6e-a7af-6ddd191d13d3 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 809.464208] env[62627]: DEBUG oslo_vmware.api [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Task: {'id': task-2194176, 'name': Rename_Task, 'duration_secs': 0.217759} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.464533] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] [instance: 16c4065b-83cf-4b1c-92bf-66ce0a75e573] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 809.464811] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-98512d18-9906-4875-a151-a0ebc0fa6a2d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.471693] env[62627]: DEBUG oslo_vmware.api [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Waiting for the task: (returnval){ [ 809.471693] env[62627]: value = "task-2194178" [ 809.471693] env[62627]: _type = "Task" [ 809.471693] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.480466] env[62627]: DEBUG oslo_vmware.api [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Task: {'id': task-2194178, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.506530] env[62627]: DEBUG nova.policy [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '000a2d4da0ef43c095a9706586aa8177', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '376915f329c74e4585a7da4c636d1732', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 809.689453] env[62627]: DEBUG nova.compute.manager [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] [instance: 32a3b8bb-7c3a-4e30-8a50-37ce53e71882] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 809.810822] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194177, 'name': CreateVM_Task, 'duration_secs': 0.396608} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.811042] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b1ba58c1-340f-486a-8f47-86fbc7b27485] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 809.811809] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 809.812010] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 809.812386] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 809.812983] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e0fb9ae9-39a7-49d4-830e-3125516db433 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.818434] env[62627]: DEBUG oslo_vmware.api [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for the task: (returnval){ [ 809.818434] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52227a94-1b8f-77cd-5243-1c937c5dba4f" [ 809.818434] env[62627]: _type = "Task" [ 809.818434] 
env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.823846] env[62627]: DEBUG oslo_concurrency.lockutils [None req-03afe6a7-56ac-4412-a977-1ae075768e01 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Acquiring lock "fffa23fe-3ce2-4879-b72d-11275d298504" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 809.824092] env[62627]: DEBUG oslo_concurrency.lockutils [None req-03afe6a7-56ac-4412-a977-1ae075768e01 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Lock "fffa23fe-3ce2-4879-b72d-11275d298504" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 809.824200] env[62627]: DEBUG oslo_concurrency.lockutils [None req-03afe6a7-56ac-4412-a977-1ae075768e01 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Acquiring lock "fffa23fe-3ce2-4879-b72d-11275d298504-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 809.824425] env[62627]: DEBUG oslo_concurrency.lockutils [None req-03afe6a7-56ac-4412-a977-1ae075768e01 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Lock "fffa23fe-3ce2-4879-b72d-11275d298504-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 809.824654] env[62627]: DEBUG oslo_concurrency.lockutils [None req-03afe6a7-56ac-4412-a977-1ae075768e01 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Lock "fffa23fe-3ce2-4879-b72d-11275d298504-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 809.830722] env[62627]: DEBUG oslo_vmware.api [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52227a94-1b8f-77cd-5243-1c937c5dba4f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.831331] env[62627]: INFO nova.compute.manager [None req-03afe6a7-56ac-4412-a977-1ae075768e01 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Terminating instance [ 809.868165] env[62627]: DEBUG nova.network.neutron [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Successfully created port: bb608229-71bb-45a8-8815-5bb1cda6c5b0 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 809.885193] env[62627]: DEBUG nova.compute.manager [req-0f665ac7-1542-4ac8-a57d-e104eacb2ac0 req-eadefcdd-01a8-461b-8a03-082c04e6dbff service nova] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Received event network-changed-4cb3b0a3-c2f5-4b36-a75a-b6d3b5e59dc3 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 809.885193] env[62627]: DEBUG nova.compute.manager [req-0f665ac7-1542-4ac8-a57d-e104eacb2ac0 req-eadefcdd-01a8-461b-8a03-082c04e6dbff service nova] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Refreshing instance network info cache due to event network-changed-4cb3b0a3-c2f5-4b36-a75a-b6d3b5e59dc3. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 809.885193] env[62627]: DEBUG oslo_concurrency.lockutils [req-0f665ac7-1542-4ac8-a57d-e104eacb2ac0 req-eadefcdd-01a8-461b-8a03-082c04e6dbff service nova] Acquiring lock "refresh_cache-fffa23fe-3ce2-4879-b72d-11275d298504" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 809.885193] env[62627]: DEBUG oslo_concurrency.lockutils [req-0f665ac7-1542-4ac8-a57d-e104eacb2ac0 req-eadefcdd-01a8-461b-8a03-082c04e6dbff service nova] Acquired lock "refresh_cache-fffa23fe-3ce2-4879-b72d-11275d298504" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 809.885193] env[62627]: DEBUG nova.network.neutron [req-0f665ac7-1542-4ac8-a57d-e104eacb2ac0 req-eadefcdd-01a8-461b-8a03-082c04e6dbff service nova] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Refreshing network info cache for port 4cb3b0a3-c2f5-4b36-a75a-b6d3b5e59dc3 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 809.921636] env[62627]: DEBUG nova.compute.manager [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 809.985507] env[62627]: DEBUG oslo_vmware.api [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Task: {'id': task-2194178, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.215564] env[62627]: DEBUG oslo_concurrency.lockutils [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 810.343420] env[62627]: DEBUG nova.compute.manager [None req-03afe6a7-56ac-4412-a977-1ae075768e01 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Start destroying the instance on the hypervisor. {{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 810.343669] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-03afe6a7-56ac-4412-a977-1ae075768e01 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 810.344610] env[62627]: DEBUG oslo_vmware.api [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52227a94-1b8f-77cd-5243-1c937c5dba4f, 'name': SearchDatastore_Task, 'duration_secs': 0.011565} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.346341] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f73f1d4a-d2dc-48c0-a24a-31764401ac96 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.349812] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 810.350119] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: b1ba58c1-340f-486a-8f47-86fbc7b27485] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 810.350420] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 810.350622] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Acquired lock "[datastore1] 
devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 810.350870] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 810.351422] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b18ca85f-532a-4d48-b358-f85aff93b844 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.360034] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-03afe6a7-56ac-4412-a977-1ae075768e01 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 810.360377] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-71e9146b-2e3d-4c53-bd4a-597e7f925b93 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.363849] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 810.364089] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 810.364861] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-86ef7236-d524-4fc6-be81-84f9a752cb81 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.368777] env[62627]: DEBUG oslo_vmware.api [None req-03afe6a7-56ac-4412-a977-1ae075768e01 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Waiting for the task: (returnval){ [ 810.368777] env[62627]: value = "task-2194179" [ 810.368777] env[62627]: _type = "Task" [ 810.368777] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.374630] env[62627]: DEBUG oslo_vmware.api [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for the task: (returnval){ [ 810.374630] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d566c4-7bd2-c244-205a-e4eb7f5b6f76" [ 810.374630] env[62627]: _type = "Task" [ 810.374630] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.385277] env[62627]: DEBUG oslo_vmware.api [None req-03afe6a7-56ac-4412-a977-1ae075768e01 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Task: {'id': task-2194179, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.394492] env[62627]: DEBUG oslo_vmware.api [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d566c4-7bd2-c244-205a-e4eb7f5b6f76, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.411561] env[62627]: DEBUG nova.network.neutron [req-9d517dfa-8142-4373-9f60-e2b205da47d7 req-cdfeca1f-86ab-48ec-a9ca-4c2306ec6401 service nova] [instance: b1ba58c1-340f-486a-8f47-86fbc7b27485] Updated VIF entry in instance network info cache for port 85865adb-92d3-4f6e-a7af-6ddd191d13d3. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 810.411997] env[62627]: DEBUG nova.network.neutron [req-9d517dfa-8142-4373-9f60-e2b205da47d7 req-cdfeca1f-86ab-48ec-a9ca-4c2306ec6401 service nova] [instance: b1ba58c1-340f-486a-8f47-86fbc7b27485] Updating instance_info_cache with network_info: [{"id": "85865adb-92d3-4f6e-a7af-6ddd191d13d3", "address": "fa:16:3e:90:63:cd", "network": {"id": "4e8b6953-adb2-4977-8315-179e01e807e0", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1952492535-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14238d8125674b63a7cf4452cb260283", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2907cce-d529-4809-af05-d29397bed211", "external-id": "nsx-vlan-transportzone-427", "segmentation_id": 427, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap85865adb-92", "ovs_interfaceid": "85865adb-92d3-4f6e-a7af-6ddd191d13d3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 810.488674] env[62627]: DEBUG oslo_vmware.api [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Task: {'id': task-2194178, 'name': PowerOnVM_Task, 'duration_secs': 0.644072} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.488887] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] [instance: 16c4065b-83cf-4b1c-92bf-66ce0a75e573] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 810.489093] env[62627]: DEBUG nova.compute.manager [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] [instance: 16c4065b-83cf-4b1c-92bf-66ce0a75e573] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 810.489916] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ba7e63f-0ec0-4a7f-b5bb-dc1f251a3089 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.571686] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ed74b7d-a46e-45b4-abc4-965351764c8d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.584705] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ce32bc0-4658-40ac-91b9-3edd09a977d8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.627744] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf10cdb8-2c1f-4ec4-aade-83014d419e23 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.637637] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c4ee602-c1fd-40e4-80bc-8f655a7c7e9b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.653160] env[62627]: DEBUG nova.compute.provider_tree [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 810.743157] env[62627]: DEBUG nova.network.neutron [req-0f665ac7-1542-4ac8-a57d-e104eacb2ac0 req-eadefcdd-01a8-461b-8a03-082c04e6dbff service nova] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Updated VIF entry in instance network info cache for port 4cb3b0a3-c2f5-4b36-a75a-b6d3b5e59dc3. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 810.743571] env[62627]: DEBUG nova.network.neutron [req-0f665ac7-1542-4ac8-a57d-e104eacb2ac0 req-eadefcdd-01a8-461b-8a03-082c04e6dbff service nova] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Updating instance_info_cache with network_info: [{"id": "4cb3b0a3-c2f5-4b36-a75a-b6d3b5e59dc3", "address": "fa:16:3e:e4:4d:b6", "network": {"id": "13960831-02bf-4e73-b792-efe56ca391d7", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-864734567-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "4edac85f0a824e4798b50cf27575dfdb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee20e439-fed9-490e-97dd-f3c886977ae1", "external-id": "nsx-vlan-transportzone-357", "segmentation_id": 357, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4cb3b0a3-c2", "ovs_interfaceid": "4cb3b0a3-c2f5-4b36-a75a-b6d3b5e59dc3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 810.759804] env[62627]: DEBUG oslo_vmware.rw_handles [None req-e36f63a7-11e0-4439-b16e-1d702ec48e64 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52109c69-5219-38ef-2d79-fbf9a25d21fe/disk-0.vmdk. {{(pid=62627) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 810.759804] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a117fa4-4e77-40fd-b208-43f874b7626a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.767641] env[62627]: DEBUG oslo_vmware.rw_handles [None req-e36f63a7-11e0-4439-b16e-1d702ec48e64 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52109c69-5219-38ef-2d79-fbf9a25d21fe/disk-0.vmdk is in state: ready. {{(pid=62627) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 810.767641] env[62627]: ERROR oslo_vmware.rw_handles [None req-e36f63a7-11e0-4439-b16e-1d702ec48e64 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52109c69-5219-38ef-2d79-fbf9a25d21fe/disk-0.vmdk due to incomplete transfer. 
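
Aside: the repeated "Acquiring lock ... by ...", "Lock ... acquired ... waited 0.000s" and "Lock ... released ... held N.NNNs" entries are emitted by oslo.concurrency's lock wrapper (the lockutils.py:402/407/421 frames). A minimal sketch of the two usage forms behind those lines follows; the lock names and function body are placeholders, not Nova's actual code.

    # Illustrative only (assumption): placeholder names and body.
    from oslo_concurrency import lockutils

    @lockutils.synchronized("refresh_cache-<instance-uuid>")
    def refresh_network_cache(instance_uuid):
        # Runs only while the named lock is held; the decorator logs the
        # acquire/wait/hold timings seen in the log above.
        return {"instance": instance_uuid, "ports": []}

    # Context-manager form, as used around the datastore image-cache entries:
    with lockutils.lock("[datastore1] devstack-image-cache_base/<image-id>"):
        pass  # fetch or reuse the cached image while holding the lock
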
[ 810.767641] env[62627]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-dc93f508-e8cc-4de8-b78b-c28da02cf1c4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.774350] env[62627]: DEBUG oslo_vmware.rw_handles [None req-e36f63a7-11e0-4439-b16e-1d702ec48e64 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52109c69-5219-38ef-2d79-fbf9a25d21fe/disk-0.vmdk. {{(pid=62627) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 810.774596] env[62627]: DEBUG nova.virt.vmwareapi.images [None req-e36f63a7-11e0-4439-b16e-1d702ec48e64 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] Uploaded image e9a1cf7c-3c1c-4510-847f-fc1bacd2e5ca to the Glance image server {{(pid=62627) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 810.776647] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-e36f63a7-11e0-4439-b16e-1d702ec48e64 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] Destroying the VM {{(pid=62627) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 810.776932] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-0de0596f-7cf6-4a90-a88b-50916e10e2b1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.782468] env[62627]: DEBUG oslo_vmware.api [None req-e36f63a7-11e0-4439-b16e-1d702ec48e64 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the task: (returnval){ [ 810.782468] env[62627]: value = "task-2194180" [ 810.782468] env[62627]: _type = "Task" [ 810.782468] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.790649] env[62627]: DEBUG oslo_vmware.api [None req-e36f63a7-11e0-4439-b16e-1d702ec48e64 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194180, 'name': Destroy_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.881056] env[62627]: DEBUG oslo_vmware.api [None req-03afe6a7-56ac-4412-a977-1ae075768e01 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Task: {'id': task-2194179, 'name': PowerOffVM_Task, 'duration_secs': 0.277479} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.884223] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-03afe6a7-56ac-4412-a977-1ae075768e01 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 810.884438] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-03afe6a7-56ac-4412-a977-1ae075768e01 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 810.884712] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dc557f7c-c253-4452-b3f3-836074cf05b2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.891691] env[62627]: DEBUG oslo_vmware.api [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d566c4-7bd2-c244-205a-e4eb7f5b6f76, 'name': SearchDatastore_Task, 'duration_secs': 0.023366} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.892470] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-18eba44e-cb2d-49ad-9d9a-0c40f72607d4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.897845] env[62627]: DEBUG oslo_vmware.api [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for the task: (returnval){ [ 810.897845] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52b4b0cb-70be-f24c-9b13-1be88b670893" [ 810.897845] env[62627]: _type = "Task" [ 810.897845] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.906359] env[62627]: DEBUG oslo_vmware.api [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52b4b0cb-70be-f24c-9b13-1be88b670893, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.915293] env[62627]: DEBUG oslo_concurrency.lockutils [req-9d517dfa-8142-4373-9f60-e2b205da47d7 req-cdfeca1f-86ab-48ec-a9ca-4c2306ec6401 service nova] Releasing lock "refresh_cache-b1ba58c1-340f-486a-8f47-86fbc7b27485" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 810.934769] env[62627]: DEBUG nova.compute.manager [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Start spawning the instance on the hypervisor. 
{{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 810.959014] env[62627]: DEBUG nova.virt.hardware [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 810.959279] env[62627]: DEBUG nova.virt.hardware [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 810.959433] env[62627]: DEBUG nova.virt.hardware [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 810.959614] env[62627]: DEBUG nova.virt.hardware [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 810.959754] env[62627]: DEBUG nova.virt.hardware [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 810.959898] env[62627]: DEBUG nova.virt.hardware [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 810.960125] env[62627]: DEBUG nova.virt.hardware [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 810.960288] env[62627]: DEBUG nova.virt.hardware [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 810.960478] env[62627]: DEBUG 
nova.virt.hardware [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 810.960664] env[62627]: DEBUG nova.virt.hardware [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 810.960837] env[62627]: DEBUG nova.virt.hardware [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 810.961791] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42e36b50-48c0-4adf-9515-d5e576e004be {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.965701] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-03afe6a7-56ac-4412-a977-1ae075768e01 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 810.965856] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-03afe6a7-56ac-4412-a977-1ae075768e01 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 810.966055] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-03afe6a7-56ac-4412-a977-1ae075768e01 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Deleting the datastore file [datastore2] fffa23fe-3ce2-4879-b72d-11275d298504 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 810.966695] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4e1a64bd-d25a-4ebb-bbe7-e32604cfb2c5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.973923] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec53c0d0-15a9-43b5-89e0-b3af9fe66998 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.980764] env[62627]: DEBUG oslo_vmware.api [None req-03afe6a7-56ac-4412-a977-1ae075768e01 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Waiting for the task: (returnval){ [ 810.980764] env[62627]: value = "task-2194182" [ 810.980764] env[62627]: _type = "Task" [ 810.980764] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.007449] env[62627]: DEBUG oslo_vmware.api [None req-03afe6a7-56ac-4412-a977-1ae075768e01 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Task: {'id': task-2194182, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.011681] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 811.157248] env[62627]: DEBUG nova.scheduler.client.report [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 811.249992] env[62627]: DEBUG oslo_concurrency.lockutils [req-0f665ac7-1542-4ac8-a57d-e104eacb2ac0 req-eadefcdd-01a8-461b-8a03-082c04e6dbff service nova] Releasing lock "refresh_cache-fffa23fe-3ce2-4879-b72d-11275d298504" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 811.291887] env[62627]: DEBUG oslo_vmware.api [None req-e36f63a7-11e0-4439-b16e-1d702ec48e64 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194180, 'name': Destroy_Task, 'duration_secs': 0.311059} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.292173] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-e36f63a7-11e0-4439-b16e-1d702ec48e64 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] Destroyed the VM [ 811.292502] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e36f63a7-11e0-4439-b16e-1d702ec48e64 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] Deleting Snapshot of the VM instance {{(pid=62627) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 811.292750] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-673625da-6988-4190-b6bd-1fd4154cc579 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.299443] env[62627]: DEBUG oslo_vmware.api [None req-e36f63a7-11e0-4439-b16e-1d702ec48e64 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the task: (returnval){ [ 811.299443] env[62627]: value = "task-2194183" [ 811.299443] env[62627]: _type = "Task" [ 811.299443] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.308393] env[62627]: DEBUG oslo_vmware.api [None req-e36f63a7-11e0-4439-b16e-1d702ec48e64 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194183, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.407962] env[62627]: DEBUG oslo_vmware.api [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52b4b0cb-70be-f24c-9b13-1be88b670893, 'name': SearchDatastore_Task, 'duration_secs': 0.016478} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.408281] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 811.408532] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore1] b1ba58c1-340f-486a-8f47-86fbc7b27485/b1ba58c1-340f-486a-8f47-86fbc7b27485.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 811.408780] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c4be6fd8-c533-499e-8bb8-34651ca79241 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.414741] env[62627]: DEBUG oslo_vmware.api [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for the task: (returnval){ [ 811.414741] env[62627]: value = "task-2194184" [ 811.414741] env[62627]: _type = "Task" [ 811.414741] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.422199] env[62627]: DEBUG oslo_vmware.api [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194184, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.490859] env[62627]: DEBUG oslo_vmware.api [None req-03afe6a7-56ac-4412-a977-1ae075768e01 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Task: {'id': task-2194182, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.642320] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c2466423-9be2-4379-af8b-6ed3567af23e tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Acquiring lock "16c4065b-83cf-4b1c-92bf-66ce0a75e573" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 811.642598] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c2466423-9be2-4379-af8b-6ed3567af23e tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Lock "16c4065b-83cf-4b1c-92bf-66ce0a75e573" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 811.642819] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c2466423-9be2-4379-af8b-6ed3567af23e tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Acquiring lock "16c4065b-83cf-4b1c-92bf-66ce0a75e573-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 811.643009] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c2466423-9be2-4379-af8b-6ed3567af23e tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Lock "16c4065b-83cf-4b1c-92bf-66ce0a75e573-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 811.643197] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c2466423-9be2-4379-af8b-6ed3567af23e tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Lock "16c4065b-83cf-4b1c-92bf-66ce0a75e573-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 811.649492] env[62627]: INFO nova.compute.manager [None req-c2466423-9be2-4379-af8b-6ed3567af23e tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] [instance: 16c4065b-83cf-4b1c-92bf-66ce0a75e573] Terminating instance [ 811.663028] env[62627]: DEBUG oslo_concurrency.lockutils [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.765s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 811.663614] env[62627]: DEBUG nova.compute.manager [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: ea289de9-9b2d-4756-a9a5-0e3dfe8db772] Start building networks asynchronously for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 811.667992] env[62627]: DEBUG oslo_concurrency.lockutils [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 27.539s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 811.668357] env[62627]: DEBUG nova.objects.instance [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62627) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 811.720077] env[62627]: DEBUG nova.compute.manager [req-1a1d5a8c-7dfa-4c33-b4b7-dcb3dd759e57 req-fc7f4b45-aa47-43a7-baed-7697b5c98573 service nova] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Received event network-vif-plugged-bb608229-71bb-45a8-8815-5bb1cda6c5b0 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 811.720077] env[62627]: DEBUG oslo_concurrency.lockutils [req-1a1d5a8c-7dfa-4c33-b4b7-dcb3dd759e57 req-fc7f4b45-aa47-43a7-baed-7697b5c98573 service nova] Acquiring lock "8af5ca34-f9c4-4b68-b0f7-7163f388b778-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 811.720077] env[62627]: DEBUG oslo_concurrency.lockutils [req-1a1d5a8c-7dfa-4c33-b4b7-dcb3dd759e57 req-fc7f4b45-aa47-43a7-baed-7697b5c98573 service nova] Lock "8af5ca34-f9c4-4b68-b0f7-7163f388b778-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 811.720077] env[62627]: DEBUG oslo_concurrency.lockutils [req-1a1d5a8c-7dfa-4c33-b4b7-dcb3dd759e57 req-fc7f4b45-aa47-43a7-baed-7697b5c98573 service nova] Lock "8af5ca34-f9c4-4b68-b0f7-7163f388b778-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 811.720077] env[62627]: DEBUG nova.compute.manager [req-1a1d5a8c-7dfa-4c33-b4b7-dcb3dd759e57 req-fc7f4b45-aa47-43a7-baed-7697b5c98573 service nova] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] No waiting events found dispatching network-vif-plugged-bb608229-71bb-45a8-8815-5bb1cda6c5b0 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 811.720845] env[62627]: WARNING nova.compute.manager [req-1a1d5a8c-7dfa-4c33-b4b7-dcb3dd759e57 req-fc7f4b45-aa47-43a7-baed-7697b5c98573 service nova] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Received unexpected event network-vif-plugged-bb608229-71bb-45a8-8815-5bb1cda6c5b0 for instance with vm_state building and task_state spawning. 
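
Aside: the "No waiting events found dispatching network-vif-plugged-..." debug line followed by the "Received unexpected event ..." warning reflects a race that is normal during spawn: Neutron's external event can reach the compute manager before a waiter for it has been registered. The sketch below is a simplified analogy of that dispatch pattern using threading.Event, under the assumption that it only conveys the idea; it is not Nova's InstanceEvents implementation.

    # Simplified analogy (assumption), not nova.compute.manager.InstanceEvents.
    import threading

    _waiters = {}  # (instance_uuid, event_name) -> threading.Event

    def prepare_for_event(instance_uuid, event_name):
        ev = threading.Event()
        _waiters[(instance_uuid, event_name)] = ev
        return ev  # caller blocks on ev.wait() until the event is dispatched

    def dispatch_external_event(instance_uuid, event_name):
        ev = _waiters.pop((instance_uuid, event_name), None)
        if ev is None:
            # Nobody registered yet: logged as an "unexpected event" above.
            print("WARNING: unexpected event %s for %s" % (event_name, instance_uuid))
            return
        ev.set()  # wake the waiter
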
[ 811.724729] env[62627]: DEBUG nova.network.neutron [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Successfully updated port: bb608229-71bb-45a8-8815-5bb1cda6c5b0 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 811.751275] env[62627]: DEBUG oslo_concurrency.lockutils [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Acquiring lock "892fc84b-02a9-4fa0-81b5-80326a060c64" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 811.752327] env[62627]: DEBUG oslo_concurrency.lockutils [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Lock "892fc84b-02a9-4fa0-81b5-80326a060c64" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 811.811276] env[62627]: DEBUG oslo_vmware.api [None req-e36f63a7-11e0-4439-b16e-1d702ec48e64 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194183, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.927554] env[62627]: DEBUG oslo_vmware.api [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194184, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.996975] env[62627]: DEBUG oslo_vmware.api [None req-03afe6a7-56ac-4412-a977-1ae075768e01 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Task: {'id': task-2194182, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.713605} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.996975] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-03afe6a7-56ac-4412-a977-1ae075768e01 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 811.996975] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-03afe6a7-56ac-4412-a977-1ae075768e01 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 811.996975] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-03afe6a7-56ac-4412-a977-1ae075768e01 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 811.997255] env[62627]: INFO nova.compute.manager [None req-03afe6a7-56ac-4412-a977-1ae075768e01 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Took 1.65 seconds to destroy the instance on the hypervisor. [ 811.997470] env[62627]: DEBUG oslo.service.loopingcall [None req-03afe6a7-56ac-4412-a977-1ae075768e01 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 811.998790] env[62627]: DEBUG nova.compute.manager [-] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 811.998790] env[62627]: DEBUG nova.network.neutron [-] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 812.153648] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c2466423-9be2-4379-af8b-6ed3567af23e tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Acquiring lock "refresh_cache-16c4065b-83cf-4b1c-92bf-66ce0a75e573" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 812.153938] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c2466423-9be2-4379-af8b-6ed3567af23e tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Acquired lock "refresh_cache-16c4065b-83cf-4b1c-92bf-66ce0a75e573" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 812.155276] env[62627]: DEBUG nova.network.neutron [None req-c2466423-9be2-4379-af8b-6ed3567af23e tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] [instance: 16c4065b-83cf-4b1c-92bf-66ce0a75e573] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 812.170131] env[62627]: DEBUG nova.compute.utils [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 812.172162] env[62627]: DEBUG nova.compute.manager [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: ea289de9-9b2d-4756-a9a5-0e3dfe8db772] Allocating IP information in the background. 
{{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 812.172442] env[62627]: DEBUG nova.network.neutron [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: ea289de9-9b2d-4756-a9a5-0e3dfe8db772] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 812.186821] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a8b199b3-718d-4962-b925-ffcb4ce76c7b tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Acquiring lock "83998771-97a7-4458-834e-df1e4b2914ee" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 812.187152] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a8b199b3-718d-4962-b925-ffcb4ce76c7b tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Lock "83998771-97a7-4458-834e-df1e4b2914ee" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 812.187439] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a8b199b3-718d-4962-b925-ffcb4ce76c7b tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Acquiring lock "83998771-97a7-4458-834e-df1e4b2914ee-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 812.187753] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a8b199b3-718d-4962-b925-ffcb4ce76c7b tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Lock "83998771-97a7-4458-834e-df1e4b2914ee-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 812.188104] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a8b199b3-718d-4962-b925-ffcb4ce76c7b tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Lock "83998771-97a7-4458-834e-df1e4b2914ee-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 812.192732] env[62627]: INFO nova.compute.manager [None req-a8b199b3-718d-4962-b925-ffcb4ce76c7b tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] Terminating instance [ 812.226466] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Acquiring lock "refresh_cache-8af5ca34-f9c4-4b68-b0f7-7163f388b778" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 812.226619] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Acquired lock 
"refresh_cache-8af5ca34-f9c4-4b68-b0f7-7163f388b778" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 812.226784] env[62627]: DEBUG nova.network.neutron [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 812.243888] env[62627]: DEBUG nova.policy [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1253a87d708448aba8d97fcabe137853', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6c9263b20f714274a55b88a8bdbd5251', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 812.294434] env[62627]: DEBUG nova.compute.manager [req-f0bdf89a-890a-4d5b-bc29-717df82f5893 req-f9ba8535-f7a1-44fe-8a99-e1edfcf29180 service nova] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Received event network-changed-21d30d49-10fb-462d-9b5f-7525c3497178 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 812.294434] env[62627]: DEBUG nova.compute.manager [req-f0bdf89a-890a-4d5b-bc29-717df82f5893 req-f9ba8535-f7a1-44fe-8a99-e1edfcf29180 service nova] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Refreshing instance network info cache due to event network-changed-21d30d49-10fb-462d-9b5f-7525c3497178. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 812.294434] env[62627]: DEBUG oslo_concurrency.lockutils [req-f0bdf89a-890a-4d5b-bc29-717df82f5893 req-f9ba8535-f7a1-44fe-8a99-e1edfcf29180 service nova] Acquiring lock "refresh_cache-a12855e2-1544-4e85-a7ff-c026fec861b6" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 812.294754] env[62627]: DEBUG oslo_concurrency.lockutils [req-f0bdf89a-890a-4d5b-bc29-717df82f5893 req-f9ba8535-f7a1-44fe-8a99-e1edfcf29180 service nova] Acquired lock "refresh_cache-a12855e2-1544-4e85-a7ff-c026fec861b6" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 812.294754] env[62627]: DEBUG nova.network.neutron [req-f0bdf89a-890a-4d5b-bc29-717df82f5893 req-f9ba8535-f7a1-44fe-8a99-e1edfcf29180 service nova] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Refreshing network info cache for port 21d30d49-10fb-462d-9b5f-7525c3497178 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 812.312382] env[62627]: DEBUG oslo_vmware.api [None req-e36f63a7-11e0-4439-b16e-1d702ec48e64 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194183, 'name': RemoveSnapshot_Task} progress is 100%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.427524] env[62627]: DEBUG oslo_vmware.api [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194184, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.770859} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.428086] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore1] b1ba58c1-340f-486a-8f47-86fbc7b27485/b1ba58c1-340f-486a-8f47-86fbc7b27485.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 812.428511] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: b1ba58c1-340f-486a-8f47-86fbc7b27485] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 812.428867] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-96a42884-53a1-470c-9925-02fa40fd3887 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.438159] env[62627]: DEBUG oslo_vmware.api [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for the task: (returnval){ [ 812.438159] env[62627]: value = "task-2194185" [ 812.438159] env[62627]: _type = "Task" [ 812.438159] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.445078] env[62627]: DEBUG oslo_vmware.api [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194185, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.681190] env[62627]: DEBUG nova.network.neutron [None req-c2466423-9be2-4379-af8b-6ed3567af23e tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] [instance: 16c4065b-83cf-4b1c-92bf-66ce0a75e573] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 812.681190] env[62627]: DEBUG nova.compute.manager [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: ea289de9-9b2d-4756-a9a5-0e3dfe8db772] Start building block device mappings for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 812.684436] env[62627]: DEBUG oslo_concurrency.lockutils [None req-10f1c9c2-b175-4cf7-8f58-135b0aecdde9 tempest-ServersAdmin275Test-1891970148 tempest-ServersAdmin275Test-1891970148-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.017s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 812.692024] env[62627]: DEBUG oslo_concurrency.lockutils [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.066s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 812.692024] env[62627]: INFO nova.compute.claims [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] [instance: 95ac4fbf-071a-403e-8cae-88d9899757ba] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 812.696553] env[62627]: DEBUG nova.network.neutron [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: ea289de9-9b2d-4756-a9a5-0e3dfe8db772] Successfully created port: fc1db27a-489e-4bc7-a90d-1af91c74e4fd {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 812.699764] env[62627]: DEBUG nova.compute.manager [None req-a8b199b3-718d-4962-b925-ffcb4ce76c7b tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] Start destroying the instance on the hypervisor. 
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 812.699973] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-a8b199b3-718d-4962-b925-ffcb4ce76c7b tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 812.700842] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41b22ddb-b377-4504-b334-939bb2ac38a5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.709044] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-a8b199b3-718d-4962-b925-ffcb4ce76c7b tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 812.709274] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3c9fa39f-86a0-4ebd-aaf5-0fd58dd8f24c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.778216] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-a8b199b3-718d-4962-b925-ffcb4ce76c7b tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 812.778448] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-a8b199b3-718d-4962-b925-ffcb4ce76c7b tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 812.778650] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8b199b3-718d-4962-b925-ffcb4ce76c7b tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Deleting the datastore file [datastore2] 83998771-97a7-4458-834e-df1e4b2914ee {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 812.778929] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8bf09d7a-4a53-4ef3-bb85-d39bd98a73cb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.787049] env[62627]: DEBUG oslo_vmware.api [None req-a8b199b3-718d-4962-b925-ffcb4ce76c7b tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the task: (returnval){ [ 812.787049] env[62627]: value = "task-2194187" [ 812.787049] env[62627]: _type = "Task" [ 812.787049] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.806711] env[62627]: DEBUG oslo_vmware.api [None req-a8b199b3-718d-4962-b925-ffcb4ce76c7b tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194187, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.808787] env[62627]: DEBUG nova.network.neutron [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 812.821093] env[62627]: DEBUG oslo_vmware.api [None req-e36f63a7-11e0-4439-b16e-1d702ec48e64 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194183, 'name': RemoveSnapshot_Task, 'duration_secs': 1.128094} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.821408] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e36f63a7-11e0-4439-b16e-1d702ec48e64 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] Deleted Snapshot of the VM instance {{(pid=62627) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 812.823120] env[62627]: INFO nova.compute.manager [None req-e36f63a7-11e0-4439-b16e-1d702ec48e64 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] Took 14.18 seconds to snapshot the instance on the hypervisor. [ 812.852961] env[62627]: DEBUG nova.network.neutron [-] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 812.866965] env[62627]: DEBUG nova.network.neutron [None req-c2466423-9be2-4379-af8b-6ed3567af23e tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] [instance: 16c4065b-83cf-4b1c-92bf-66ce0a75e573] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 812.889691] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Acquiring lock "e76bdb57-4a30-4222-ad4c-d95e4e6193cb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 812.889884] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Lock "e76bdb57-4a30-4222-ad4c-d95e4e6193cb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 812.948811] env[62627]: DEBUG oslo_vmware.api [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194185, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072288} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.952036] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: b1ba58c1-340f-486a-8f47-86fbc7b27485] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 812.952036] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c28d6819-2558-4184-a2ec-983e87d32b20 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.974224] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: b1ba58c1-340f-486a-8f47-86fbc7b27485] Reconfiguring VM instance instance-0000001b to attach disk [datastore1] b1ba58c1-340f-486a-8f47-86fbc7b27485/b1ba58c1-340f-486a-8f47-86fbc7b27485.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 812.974533] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5295a467-4bba-4e99-8dd8-eae618d75f79 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.994861] env[62627]: DEBUG oslo_vmware.api [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for the task: (returnval){ [ 812.994861] env[62627]: value = "task-2194188" [ 812.994861] env[62627]: _type = "Task" [ 812.994861] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.002943] env[62627]: DEBUG oslo_vmware.api [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194188, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.297627] env[62627]: DEBUG oslo_vmware.api [None req-a8b199b3-718d-4962-b925-ffcb4ce76c7b tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194187, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.349792} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.297926] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8b199b3-718d-4962-b925-ffcb4ce76c7b tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 813.298138] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-a8b199b3-718d-4962-b925-ffcb4ce76c7b tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 813.298317] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-a8b199b3-718d-4962-b925-ffcb4ce76c7b tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 813.298614] env[62627]: INFO nova.compute.manager [None req-a8b199b3-718d-4962-b925-ffcb4ce76c7b tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] Took 0.60 seconds to destroy the instance on the hypervisor. [ 813.298896] env[62627]: DEBUG oslo.service.loopingcall [None req-a8b199b3-718d-4962-b925-ffcb4ce76c7b tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 813.299103] env[62627]: DEBUG nova.compute.manager [-] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 813.299203] env[62627]: DEBUG nova.network.neutron [-] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 813.326662] env[62627]: DEBUG nova.compute.manager [None req-e36f63a7-11e0-4439-b16e-1d702ec48e64 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] Instance disappeared during snapshot {{(pid=62627) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4595}} [ 813.340423] env[62627]: DEBUG nova.compute.manager [None req-e36f63a7-11e0-4439-b16e-1d702ec48e64 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Image not found during clean up e9a1cf7c-3c1c-4510-847f-fc1bacd2e5ca {{(pid=62627) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4601}} [ 813.354011] env[62627]: INFO nova.compute.manager [-] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Took 1.36 seconds to deallocate network for instance. 
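The RemoveSnapshot_Task, CopyVirtualDisk_Task and DeleteDatastoreFile_Task entries above all follow the same shape: a vCenter task is submitted, its progress is polled ("progress is N%"), and completion is reported together with a duration_secs. Below is a minimal standalone poll-until-done sketch of that loop; it is illustrative only (FakeTask and the poll interval are invented for the example) and is not oslo.vmware's wait_for_task.

    import time

    class FakeTask:
        """Stand-in for a vCenter task handle; a real task would be queried over the API."""

        def __init__(self, task_id, steps=4):
            self.task_id = task_id
            self._progress = 0
            self._step = max(1, 100 // steps)

        def poll(self):
            # Each poll advances the fake task a little further.
            self._progress = min(100, self._progress + self._step)
            state = "success" if self._progress >= 100 else "running"
            return state, self._progress

    def wait_for_task(task, interval=0.5):
        start = time.time()
        while True:
            state, progress = task.poll()
            print("Task %s progress is %d%%" % (task.task_id, progress))
            if state == "success":
                print("Task %s completed successfully, duration_secs=%.3f"
                      % (task.task_id, time.time() - start))
                return
            if state == "error":
                raise RuntimeError("Task %s failed" % task.task_id)
            time.sleep(interval)

    if __name__ == "__main__":
        wait_for_task(FakeTask("task-2194187"), interval=0.1)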
[ 813.368203] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c2466423-9be2-4379-af8b-6ed3567af23e tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Releasing lock "refresh_cache-16c4065b-83cf-4b1c-92bf-66ce0a75e573" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 813.368681] env[62627]: DEBUG nova.compute.manager [None req-c2466423-9be2-4379-af8b-6ed3567af23e tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] [instance: 16c4065b-83cf-4b1c-92bf-66ce0a75e573] Start destroying the instance on the hypervisor. {{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 813.368868] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c2466423-9be2-4379-af8b-6ed3567af23e tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] [instance: 16c4065b-83cf-4b1c-92bf-66ce0a75e573] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 813.369752] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2f44436-4fe5-4f1e-aa8f-811fead7b3ba {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.382292] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2466423-9be2-4379-af8b-6ed3567af23e tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] [instance: 16c4065b-83cf-4b1c-92bf-66ce0a75e573] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 813.382473] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fff9196a-04cc-4854-b3a4-b7fd4bf3a751 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.393083] env[62627]: DEBUG oslo_vmware.api [None req-c2466423-9be2-4379-af8b-6ed3567af23e tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Waiting for the task: (returnval){ [ 813.393083] env[62627]: value = "task-2194189" [ 813.393083] env[62627]: _type = "Task" [ 813.393083] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.400124] env[62627]: DEBUG oslo_vmware.api [None req-c2466423-9be2-4379-af8b-6ed3567af23e tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Task: {'id': task-2194189, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.464673] env[62627]: DEBUG nova.network.neutron [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Updating instance_info_cache with network_info: [{"id": "bb608229-71bb-45a8-8815-5bb1cda6c5b0", "address": "fa:16:3e:d8:fd:1a", "network": {"id": "0fb7571e-c31a-4c7f-9d1a-a647a8f3072b", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1705879723-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "376915f329c74e4585a7da4c636d1732", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ffcecdaa-a7b8-49fc-9371-dbdb7744688e", "external-id": "nsx-vlan-transportzone-994", "segmentation_id": 994, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb608229-71", "ovs_interfaceid": "bb608229-71bb-45a8-8815-5bb1cda6c5b0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 813.507958] env[62627]: DEBUG oslo_vmware.api [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194188, 'name': ReconfigVM_Task, 'duration_secs': 0.259014} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.508248] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: b1ba58c1-340f-486a-8f47-86fbc7b27485] Reconfigured VM instance instance-0000001b to attach disk [datastore1] b1ba58c1-340f-486a-8f47-86fbc7b27485/b1ba58c1-340f-486a-8f47-86fbc7b27485.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 813.510108] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dbe5073a-d7bf-4f2d-9525-5cdf2a4bfdc9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.518602] env[62627]: DEBUG oslo_vmware.api [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for the task: (returnval){ [ 813.518602] env[62627]: value = "task-2194190" [ 813.518602] env[62627]: _type = "Task" [ 813.518602] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.529673] env[62627]: DEBUG oslo_vmware.api [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194190, 'name': Rename_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.696483] env[62627]: DEBUG nova.network.neutron [req-f0bdf89a-890a-4d5b-bc29-717df82f5893 req-f9ba8535-f7a1-44fe-8a99-e1edfcf29180 service nova] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Updated VIF entry in instance network info cache for port 21d30d49-10fb-462d-9b5f-7525c3497178. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 813.696841] env[62627]: DEBUG nova.network.neutron [req-f0bdf89a-890a-4d5b-bc29-717df82f5893 req-f9ba8535-f7a1-44fe-8a99-e1edfcf29180 service nova] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Updating instance_info_cache with network_info: [{"id": "21d30d49-10fb-462d-9b5f-7525c3497178", "address": "fa:16:3e:9b:65:45", "network": {"id": "a1c5c227-8f1f-4881-a6d8-f446b64d9846", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1052922661-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.251", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e4f7ad3ebcf4c4d97bdbce58eec5a09", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "023d6500-887e-4dc4-bec5-06b40450d9c0", "external-id": "nsx-vlan-transportzone-108", "segmentation_id": 108, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap21d30d49-10", "ovs_interfaceid": "21d30d49-10fb-462d-9b5f-7525c3497178", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 813.698778] env[62627]: DEBUG nova.compute.manager [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: ea289de9-9b2d-4756-a9a5-0e3dfe8db772] Start spawning the instance on the hypervisor. 
{{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 813.730603] env[62627]: DEBUG nova.virt.hardware [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 813.730864] env[62627]: DEBUG nova.virt.hardware [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 813.731170] env[62627]: DEBUG nova.virt.hardware [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 813.731469] env[62627]: DEBUG nova.virt.hardware [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 813.731649] env[62627]: DEBUG nova.virt.hardware [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 813.731840] env[62627]: DEBUG nova.virt.hardware [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 813.732140] env[62627]: DEBUG nova.virt.hardware [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 813.732360] env[62627]: DEBUG nova.virt.hardware [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 813.732590] env[62627]: DEBUG nova.virt.hardware [None 
req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 813.732819] env[62627]: DEBUG nova.virt.hardware [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 813.733068] env[62627]: DEBUG nova.virt.hardware [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 813.734595] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6c190d7-1d6b-4168-90e1-55f0a2b7228a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.749425] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb978085-d8a3-4be3-9d81-8e2ff5161820 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.803085] env[62627]: DEBUG nova.compute.manager [req-85af29ff-f10e-4d46-9c2f-7269a8157c86 req-3f64676a-9585-4fe9-9e8d-1c85c5f6f78b service nova] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Received event network-changed-bb608229-71bb-45a8-8815-5bb1cda6c5b0 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 813.803532] env[62627]: DEBUG nova.compute.manager [req-85af29ff-f10e-4d46-9c2f-7269a8157c86 req-3f64676a-9585-4fe9-9e8d-1c85c5f6f78b service nova] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Refreshing instance network info cache due to event network-changed-bb608229-71bb-45a8-8815-5bb1cda6c5b0. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 813.803863] env[62627]: DEBUG oslo_concurrency.lockutils [req-85af29ff-f10e-4d46-9c2f-7269a8157c86 req-3f64676a-9585-4fe9-9e8d-1c85c5f6f78b service nova] Acquiring lock "refresh_cache-8af5ca34-f9c4-4b68-b0f7-7163f388b778" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 813.860229] env[62627]: DEBUG oslo_concurrency.lockutils [None req-03afe6a7-56ac-4412-a977-1ae075768e01 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 813.900566] env[62627]: DEBUG oslo_vmware.api [None req-c2466423-9be2-4379-af8b-6ed3567af23e tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Task: {'id': task-2194189, 'name': PowerOffVM_Task, 'duration_secs': 0.28555} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.903394] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2466423-9be2-4379-af8b-6ed3567af23e tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] [instance: 16c4065b-83cf-4b1c-92bf-66ce0a75e573] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 813.903590] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c2466423-9be2-4379-af8b-6ed3567af23e tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] [instance: 16c4065b-83cf-4b1c-92bf-66ce0a75e573] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 813.904318] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-43065fe9-0240-4d92-9399-62793cc42bf5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.931066] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c2466423-9be2-4379-af8b-6ed3567af23e tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] [instance: 16c4065b-83cf-4b1c-92bf-66ce0a75e573] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 813.931350] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c2466423-9be2-4379-af8b-6ed3567af23e tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] [instance: 16c4065b-83cf-4b1c-92bf-66ce0a75e573] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 813.931580] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2466423-9be2-4379-af8b-6ed3567af23e tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Deleting the datastore file [datastore2] 16c4065b-83cf-4b1c-92bf-66ce0a75e573 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 813.931865] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b7163a99-6188-4bb7-864a-b41bbfa00f84 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.937718] env[62627]: DEBUG oslo_vmware.api [None req-c2466423-9be2-4379-af8b-6ed3567af23e tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Waiting for the task: (returnval){ [ 813.937718] env[62627]: value = "task-2194192" [ 813.937718] env[62627]: _type = "Task" [ 813.937718] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.947814] env[62627]: DEBUG oslo_vmware.api [None req-c2466423-9be2-4379-af8b-6ed3567af23e tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Task: {'id': task-2194192, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.967756] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Releasing lock "refresh_cache-8af5ca34-f9c4-4b68-b0f7-7163f388b778" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 813.968073] env[62627]: DEBUG nova.compute.manager [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Instance network_info: |[{"id": "bb608229-71bb-45a8-8815-5bb1cda6c5b0", "address": "fa:16:3e:d8:fd:1a", "network": {"id": "0fb7571e-c31a-4c7f-9d1a-a647a8f3072b", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1705879723-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "376915f329c74e4585a7da4c636d1732", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ffcecdaa-a7b8-49fc-9371-dbdb7744688e", "external-id": "nsx-vlan-transportzone-994", "segmentation_id": 994, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb608229-71", "ovs_interfaceid": "bb608229-71bb-45a8-8815-5bb1cda6c5b0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 813.968369] env[62627]: DEBUG oslo_concurrency.lockutils [req-85af29ff-f10e-4d46-9c2f-7269a8157c86 req-3f64676a-9585-4fe9-9e8d-1c85c5f6f78b service nova] Acquired lock "refresh_cache-8af5ca34-f9c4-4b68-b0f7-7163f388b778" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 813.968546] env[62627]: DEBUG nova.network.neutron [req-85af29ff-f10e-4d46-9c2f-7269a8157c86 req-3f64676a-9585-4fe9-9e8d-1c85c5f6f78b service nova] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Refreshing network info cache for port bb608229-71bb-45a8-8815-5bb1cda6c5b0 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 813.969990] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d8:fd:1a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ffcecdaa-a7b8-49fc-9371-dbdb7744688e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bb608229-71bb-45a8-8815-5bb1cda6c5b0', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 813.977575] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 
tempest-ImagesOneServerTestJSON-1340245844-project-member] Creating folder: Project (376915f329c74e4585a7da4c636d1732). Parent ref: group-v447541. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 813.982595] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c372d251-9193-4fe8-87ad-913d7a3b6a10 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.996630] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Created folder: Project (376915f329c74e4585a7da4c636d1732) in parent group-v447541. [ 813.996820] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Creating folder: Instances. Parent ref: group-v447617. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 813.997067] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-aca75bfc-562e-4c30-a6d3-24972547be5c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.007384] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Created folder: Instances in parent group-v447617. [ 814.007603] env[62627]: DEBUG oslo.service.loopingcall [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 814.007789] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 814.007987] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-495cf91e-229d-4176-a571-df73c877a3ee {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.035856] env[62627]: DEBUG oslo_vmware.api [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194190, 'name': Rename_Task, 'duration_secs': 0.146883} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.037092] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: b1ba58c1-340f-486a-8f47-86fbc7b27485] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 814.037325] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 814.037325] env[62627]: value = "task-2194195" [ 814.037325] env[62627]: _type = "Task" [ 814.037325] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.037561] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b0e7a795-402f-4658-87da-d28399a04518 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.050965] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194195, 'name': CreateVM_Task} progress is 6%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.052286] env[62627]: DEBUG oslo_vmware.api [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for the task: (returnval){ [ 814.052286] env[62627]: value = "task-2194196" [ 814.052286] env[62627]: _type = "Task" [ 814.052286] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.061859] env[62627]: DEBUG oslo_vmware.api [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194196, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.202888] env[62627]: DEBUG oslo_concurrency.lockutils [req-f0bdf89a-890a-4d5b-bc29-717df82f5893 req-f9ba8535-f7a1-44fe-8a99-e1edfcf29180 service nova] Releasing lock "refresh_cache-a12855e2-1544-4e85-a7ff-c026fec861b6" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 814.259936] env[62627]: DEBUG nova.network.neutron [req-85af29ff-f10e-4d46-9c2f-7269a8157c86 req-3f64676a-9585-4fe9-9e8d-1c85c5f6f78b service nova] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Updated VIF entry in instance network info cache for port bb608229-71bb-45a8-8815-5bb1cda6c5b0. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 814.260316] env[62627]: DEBUG nova.network.neutron [req-85af29ff-f10e-4d46-9c2f-7269a8157c86 req-3f64676a-9585-4fe9-9e8d-1c85c5f6f78b service nova] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Updating instance_info_cache with network_info: [{"id": "bb608229-71bb-45a8-8815-5bb1cda6c5b0", "address": "fa:16:3e:d8:fd:1a", "network": {"id": "0fb7571e-c31a-4c7f-9d1a-a647a8f3072b", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1705879723-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "376915f329c74e4585a7da4c636d1732", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ffcecdaa-a7b8-49fc-9371-dbdb7744688e", "external-id": "nsx-vlan-transportzone-994", "segmentation_id": 994, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb608229-71", "ovs_interfaceid": "bb608229-71bb-45a8-8815-5bb1cda6c5b0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 814.289732] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a07ab1b-2bc1-470b-bf46-068f2dda8d35 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.297616] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fae196f3-4fca-4297-b19d-ce71b0eaeff0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.329215] env[62627]: DEBUG nova.network.neutron [-] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 814.331138] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-759a6e31-a92e-4b5c-99cd-e3de0b1ca4ac {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.339689] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09b44600-8061-4eaa-9e63-ea1effc90d07 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.354832] env[62627]: DEBUG nova.compute.provider_tree [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 814.447323] env[62627]: DEBUG oslo_vmware.api [None req-c2466423-9be2-4379-af8b-6ed3567af23e tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Task: {'id': task-2194192, 
'name': DeleteDatastoreFile_Task, 'duration_secs': 0.224892} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.447590] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2466423-9be2-4379-af8b-6ed3567af23e tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 814.447782] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c2466423-9be2-4379-af8b-6ed3567af23e tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] [instance: 16c4065b-83cf-4b1c-92bf-66ce0a75e573] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 814.447954] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c2466423-9be2-4379-af8b-6ed3567af23e tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] [instance: 16c4065b-83cf-4b1c-92bf-66ce0a75e573] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 814.448147] env[62627]: INFO nova.compute.manager [None req-c2466423-9be2-4379-af8b-6ed3567af23e tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] [instance: 16c4065b-83cf-4b1c-92bf-66ce0a75e573] Took 1.08 seconds to destroy the instance on the hypervisor. [ 814.448390] env[62627]: DEBUG oslo.service.loopingcall [None req-c2466423-9be2-4379-af8b-6ed3567af23e tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 814.448583] env[62627]: DEBUG nova.compute.manager [-] [instance: 16c4065b-83cf-4b1c-92bf-66ce0a75e573] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 814.448683] env[62627]: DEBUG nova.network.neutron [-] [instance: 16c4065b-83cf-4b1c-92bf-66ce0a75e573] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 814.465708] env[62627]: DEBUG nova.network.neutron [-] [instance: 16c4065b-83cf-4b1c-92bf-66ce0a75e573] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 814.549234] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194195, 'name': CreateVM_Task, 'duration_secs': 0.431197} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.549234] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 814.550018] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 814.550240] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 814.550626] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 814.550887] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-56e2ef19-3aab-4f97-b926-884155cabd4f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.560982] env[62627]: DEBUG oslo_vmware.api [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Waiting for the task: (returnval){ [ 814.560982] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]520706ed-03ad-30e6-dd50-887cd1ab5a7b" [ 814.560982] env[62627]: _type = "Task" [ 814.560982] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.564535] env[62627]: DEBUG oslo_vmware.api [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194196, 'name': PowerOnVM_Task, 'duration_secs': 0.501733} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.568652] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: b1ba58c1-340f-486a-8f47-86fbc7b27485] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 814.568652] env[62627]: INFO nova.compute.manager [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: b1ba58c1-340f-486a-8f47-86fbc7b27485] Took 9.25 seconds to spawn the instance on the hypervisor. 
[ 814.568652] env[62627]: DEBUG nova.compute.manager [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: b1ba58c1-340f-486a-8f47-86fbc7b27485] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 814.569201] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76d86f39-2b87-4a7e-bdb3-2d06576470e9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.576370] env[62627]: DEBUG oslo_vmware.api [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]520706ed-03ad-30e6-dd50-887cd1ab5a7b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.763159] env[62627]: DEBUG oslo_concurrency.lockutils [req-85af29ff-f10e-4d46-9c2f-7269a8157c86 req-3f64676a-9585-4fe9-9e8d-1c85c5f6f78b service nova] Releasing lock "refresh_cache-8af5ca34-f9c4-4b68-b0f7-7163f388b778" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 814.763159] env[62627]: DEBUG nova.compute.manager [req-85af29ff-f10e-4d46-9c2f-7269a8157c86 req-3f64676a-9585-4fe9-9e8d-1c85c5f6f78b service nova] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Received event network-vif-deleted-4cb3b0a3-c2f5-4b36-a75a-b6d3b5e59dc3 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 814.835049] env[62627]: INFO nova.compute.manager [-] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] Took 1.54 seconds to deallocate network for instance. [ 814.858387] env[62627]: DEBUG nova.scheduler.client.report [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 814.968382] env[62627]: DEBUG nova.network.neutron [-] [instance: 16c4065b-83cf-4b1c-92bf-66ce0a75e573] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 815.078054] env[62627]: DEBUG oslo_vmware.api [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]520706ed-03ad-30e6-dd50-887cd1ab5a7b, 'name': SearchDatastore_Task, 'duration_secs': 0.029876} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.078054] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 815.078054] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 815.078054] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 815.078185] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 815.078185] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 815.078185] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8eb996c3-3d4e-4cf3-84d5-8adfb8b4ccf6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.092520] env[62627]: DEBUG nova.network.neutron [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: ea289de9-9b2d-4756-a9a5-0e3dfe8db772] Successfully updated port: fc1db27a-489e-4bc7-a90d-1af91c74e4fd {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 815.097354] env[62627]: INFO nova.compute.manager [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: b1ba58c1-340f-486a-8f47-86fbc7b27485] Took 34.97 seconds to build instance. 
[ 815.103676] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 815.103871] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 815.105205] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dff34dd9-d668-413e-8c2d-ec61e469ad59 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.116996] env[62627]: DEBUG oslo_vmware.api [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Waiting for the task: (returnval){ [ 815.116996] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52217299-c66d-a89b-7c9f-1d25bb22db48" [ 815.116996] env[62627]: _type = "Task" [ 815.116996] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.126071] env[62627]: DEBUG oslo_vmware.api [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52217299-c66d-a89b-7c9f-1d25bb22db48, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.341620] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a8b199b3-718d-4962-b925-ffcb4ce76c7b tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 815.367481] env[62627]: DEBUG oslo_concurrency.lockutils [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.677s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 815.367830] env[62627]: DEBUG nova.compute.manager [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] [instance: 95ac4fbf-071a-403e-8cae-88d9899757ba] Start building networks asynchronously for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 815.370802] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ddd0248c-dc8d-4154-a2a1-aa2a8e3b3e61 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.714s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 815.370802] env[62627]: DEBUG nova.objects.instance [None req-ddd0248c-dc8d-4154-a2a1-aa2a8e3b3e61 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Lazy-loading 'resources' on Instance uuid 63e884c4-2cc1-431d-8a7f-1c4259b29e1a {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 815.472318] env[62627]: INFO nova.compute.manager [-] [instance: 16c4065b-83cf-4b1c-92bf-66ce0a75e573] Took 1.02 seconds to deallocate network for instance. [ 815.600316] env[62627]: DEBUG oslo_concurrency.lockutils [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquiring lock "refresh_cache-ea289de9-9b2d-4756-a9a5-0e3dfe8db772" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 815.600316] env[62627]: DEBUG oslo_concurrency.lockutils [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquired lock "refresh_cache-ea289de9-9b2d-4756-a9a5-0e3dfe8db772" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 815.600316] env[62627]: DEBUG nova.network.neutron [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: ea289de9-9b2d-4756-a9a5-0e3dfe8db772] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 815.604029] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7cbff2f5-6caa-476d-96f5-05191773e7f6 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Lock "b1ba58c1-340f-486a-8f47-86fbc7b27485" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 67.093s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 815.627874] env[62627]: DEBUG oslo_vmware.api [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52217299-c66d-a89b-7c9f-1d25bb22db48, 'name': SearchDatastore_Task, 'duration_secs': 0.024063} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.628701] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e21f49e2-1cef-4413-924b-0e055f725e80 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.634397] env[62627]: DEBUG oslo_vmware.api [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Waiting for the task: (returnval){ [ 815.634397] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52e26e21-fce0-256c-c2ab-35a5a0997958" [ 815.634397] env[62627]: _type = "Task" [ 815.634397] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.642580] env[62627]: DEBUG oslo_vmware.api [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52e26e21-fce0-256c-c2ab-35a5a0997958, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.832574] env[62627]: DEBUG nova.compute.manager [req-f5be4d76-edb8-481c-b1e9-9785473f70a5 req-711c46ba-9224-4a00-8810-e2008ca6c403 service nova] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] Received event network-vif-deleted-33dc8116-8cea-41e8-8ac1-a7f42c7bca81 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 815.832729] env[62627]: DEBUG nova.compute.manager [req-f5be4d76-edb8-481c-b1e9-9785473f70a5 req-711c46ba-9224-4a00-8810-e2008ca6c403 service nova] [instance: ea289de9-9b2d-4756-a9a5-0e3dfe8db772] Received event network-vif-plugged-fc1db27a-489e-4bc7-a90d-1af91c74e4fd {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 815.832967] env[62627]: DEBUG oslo_concurrency.lockutils [req-f5be4d76-edb8-481c-b1e9-9785473f70a5 req-711c46ba-9224-4a00-8810-e2008ca6c403 service nova] Acquiring lock "ea289de9-9b2d-4756-a9a5-0e3dfe8db772-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 815.833096] env[62627]: DEBUG oslo_concurrency.lockutils [req-f5be4d76-edb8-481c-b1e9-9785473f70a5 req-711c46ba-9224-4a00-8810-e2008ca6c403 service nova] Lock "ea289de9-9b2d-4756-a9a5-0e3dfe8db772-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 815.833267] env[62627]: DEBUG oslo_concurrency.lockutils [req-f5be4d76-edb8-481c-b1e9-9785473f70a5 req-711c46ba-9224-4a00-8810-e2008ca6c403 service nova] Lock "ea289de9-9b2d-4756-a9a5-0e3dfe8db772-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 815.833446] env[62627]: DEBUG nova.compute.manager [req-f5be4d76-edb8-481c-b1e9-9785473f70a5 req-711c46ba-9224-4a00-8810-e2008ca6c403 service nova] [instance: ea289de9-9b2d-4756-a9a5-0e3dfe8db772] No waiting events found dispatching 
network-vif-plugged-fc1db27a-489e-4bc7-a90d-1af91c74e4fd {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 815.833640] env[62627]: WARNING nova.compute.manager [req-f5be4d76-edb8-481c-b1e9-9785473f70a5 req-711c46ba-9224-4a00-8810-e2008ca6c403 service nova] [instance: ea289de9-9b2d-4756-a9a5-0e3dfe8db772] Received unexpected event network-vif-plugged-fc1db27a-489e-4bc7-a90d-1af91c74e4fd for instance with vm_state building and task_state spawning. [ 815.833736] env[62627]: DEBUG nova.compute.manager [req-f5be4d76-edb8-481c-b1e9-9785473f70a5 req-711c46ba-9224-4a00-8810-e2008ca6c403 service nova] [instance: ea289de9-9b2d-4756-a9a5-0e3dfe8db772] Received event network-changed-fc1db27a-489e-4bc7-a90d-1af91c74e4fd {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 815.834268] env[62627]: DEBUG nova.compute.manager [req-f5be4d76-edb8-481c-b1e9-9785473f70a5 req-711c46ba-9224-4a00-8810-e2008ca6c403 service nova] [instance: ea289de9-9b2d-4756-a9a5-0e3dfe8db772] Refreshing instance network info cache due to event network-changed-fc1db27a-489e-4bc7-a90d-1af91c74e4fd. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 815.834268] env[62627]: DEBUG oslo_concurrency.lockutils [req-f5be4d76-edb8-481c-b1e9-9785473f70a5 req-711c46ba-9224-4a00-8810-e2008ca6c403 service nova] Acquiring lock "refresh_cache-ea289de9-9b2d-4756-a9a5-0e3dfe8db772" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 815.874031] env[62627]: DEBUG nova.compute.utils [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 815.875333] env[62627]: DEBUG nova.compute.manager [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] [instance: 95ac4fbf-071a-403e-8cae-88d9899757ba] Allocating IP information in the background. 
{{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 815.875991] env[62627]: DEBUG nova.network.neutron [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] [instance: 95ac4fbf-071a-403e-8cae-88d9899757ba] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 815.918945] env[62627]: DEBUG nova.policy [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b8b54f1fd077459f882ca14114733548', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ca5af160d3f145c28f348d6148af4db8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 815.978272] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c2466423-9be2-4379-af8b-6ed3567af23e tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 816.038708] env[62627]: DEBUG oslo_concurrency.lockutils [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Acquiring lock "87bc4af9-7cbc-4797-8aaa-cf5f8ba51218" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 816.038935] env[62627]: DEBUG oslo_concurrency.lockutils [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Lock "87bc4af9-7cbc-4797-8aaa-cf5f8ba51218" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 816.106888] env[62627]: DEBUG nova.compute.manager [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 816.145994] env[62627]: DEBUG oslo_vmware.api [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52e26e21-fce0-256c-c2ab-35a5a0997958, 'name': SearchDatastore_Task, 'duration_secs': 0.010957} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.149523] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 816.149817] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore1] 8af5ca34-f9c4-4b68-b0f7-7163f388b778/8af5ca34-f9c4-4b68-b0f7-7163f388b778.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 816.150287] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-de8096e1-1247-45ff-94c3-f2d55b88a180 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.163141] env[62627]: DEBUG oslo_vmware.api [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Waiting for the task: (returnval){ [ 816.163141] env[62627]: value = "task-2194197" [ 816.163141] env[62627]: _type = "Task" [ 816.163141] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.170204] env[62627]: DEBUG nova.network.neutron [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: ea289de9-9b2d-4756-a9a5-0e3dfe8db772] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 816.177890] env[62627]: DEBUG oslo_vmware.api [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Task: {'id': task-2194197, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.381098] env[62627]: DEBUG nova.compute.manager [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] [instance: 95ac4fbf-071a-403e-8cae-88d9899757ba] Start building block device mappings for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 816.391061] env[62627]: DEBUG nova.network.neutron [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] [instance: 95ac4fbf-071a-403e-8cae-88d9899757ba] Successfully created port: d126d7bd-d076-4411-8929-af4270455b6d {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 816.440178] env[62627]: DEBUG nova.network.neutron [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: ea289de9-9b2d-4756-a9a5-0e3dfe8db772] Updating instance_info_cache with network_info: [{"id": "fc1db27a-489e-4bc7-a90d-1af91c74e4fd", "address": "fa:16:3e:7c:a5:a2", "network": {"id": "7c6d5a35-da58-4a44-83d0-20feb994e5ab", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-709440062-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c9263b20f714274a55b88a8bdbd5251", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "99be9a5e-b3f9-4e6c-83d5-df11f817847d", "external-id": "nsx-vlan-transportzone-566", "segmentation_id": 566, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc1db27a-48", "ovs_interfaceid": "fc1db27a-489e-4bc7-a90d-1af91c74e4fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 816.534650] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70f29aa6-722f-4ab4-a1c6-57816c820cf3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.546323] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12326092-8eed-4b33-be84-5867fb80b2dd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.578925] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e5d5dcc-13b0-4c3f-9504-b9669fd9adcd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.587600] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c234d052-ffb8-41ac-8546-5ea15e14faa0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.605679] env[62627]: DEBUG nova.compute.provider_tree [None req-ddd0248c-dc8d-4154-a2a1-aa2a8e3b3e61 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 816.631692] env[62627]: DEBUG 
oslo_concurrency.lockutils [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 816.676534] env[62627]: DEBUG oslo_vmware.api [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Task: {'id': task-2194197, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.464612} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.676534] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore1] 8af5ca34-f9c4-4b68-b0f7-7163f388b778/8af5ca34-f9c4-4b68-b0f7-7163f388b778.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 816.676534] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 816.676534] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-74702b4d-4ee0-4243-a42f-01f596257cd7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.682206] env[62627]: DEBUG oslo_vmware.api [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Waiting for the task: (returnval){ [ 816.682206] env[62627]: value = "task-2194198" [ 816.682206] env[62627]: _type = "Task" [ 816.682206] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.689768] env[62627]: DEBUG oslo_vmware.api [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Task: {'id': task-2194198, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.943065] env[62627]: DEBUG oslo_concurrency.lockutils [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Releasing lock "refresh_cache-ea289de9-9b2d-4756-a9a5-0e3dfe8db772" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 816.943262] env[62627]: DEBUG nova.compute.manager [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: ea289de9-9b2d-4756-a9a5-0e3dfe8db772] Instance network_info: |[{"id": "fc1db27a-489e-4bc7-a90d-1af91c74e4fd", "address": "fa:16:3e:7c:a5:a2", "network": {"id": "7c6d5a35-da58-4a44-83d0-20feb994e5ab", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-709440062-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c9263b20f714274a55b88a8bdbd5251", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "99be9a5e-b3f9-4e6c-83d5-df11f817847d", "external-id": "nsx-vlan-transportzone-566", "segmentation_id": 566, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc1db27a-48", "ovs_interfaceid": "fc1db27a-489e-4bc7-a90d-1af91c74e4fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 816.943619] env[62627]: DEBUG oslo_concurrency.lockutils [req-f5be4d76-edb8-481c-b1e9-9785473f70a5 req-711c46ba-9224-4a00-8810-e2008ca6c403 service nova] Acquired lock "refresh_cache-ea289de9-9b2d-4756-a9a5-0e3dfe8db772" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 816.943832] env[62627]: DEBUG nova.network.neutron [req-f5be4d76-edb8-481c-b1e9-9785473f70a5 req-711c46ba-9224-4a00-8810-e2008ca6c403 service nova] [instance: ea289de9-9b2d-4756-a9a5-0e3dfe8db772] Refreshing network info cache for port fc1db27a-489e-4bc7-a90d-1af91c74e4fd {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 816.945512] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: ea289de9-9b2d-4756-a9a5-0e3dfe8db772] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7c:a5:a2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '99be9a5e-b3f9-4e6c-83d5-df11f817847d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fc1db27a-489e-4bc7-a90d-1af91c74e4fd', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 816.954040] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 
tempest-DeleteServersTestJSON-1791757005-project-member] Creating folder: Project (6c9263b20f714274a55b88a8bdbd5251). Parent ref: group-v447541. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 816.955059] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-253a7699-1539-424b-8279-a6eea47b240c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.967174] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Created folder: Project (6c9263b20f714274a55b88a8bdbd5251) in parent group-v447541. [ 816.967366] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Creating folder: Instances. Parent ref: group-v447620. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 816.967593] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ec6c85c7-dabc-489d-a6e7-f36fe939a419 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.976417] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Created folder: Instances in parent group-v447620. [ 816.976668] env[62627]: DEBUG oslo.service.loopingcall [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 816.976874] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ea289de9-9b2d-4756-a9a5-0e3dfe8db772] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 816.977083] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-323b0e21-e317-486c-b95d-2381028ac331 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.996037] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 816.996037] env[62627]: value = "task-2194201" [ 816.996037] env[62627]: _type = "Task" [ 816.996037] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.007099] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194201, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.109241] env[62627]: DEBUG nova.scheduler.client.report [None req-ddd0248c-dc8d-4154-a2a1-aa2a8e3b3e61 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 817.191888] env[62627]: DEBUG oslo_vmware.api [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Task: {'id': task-2194198, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068321} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.192199] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 817.192997] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f67de1e-6d27-414c-97cc-d4b8fded878a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.218477] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Reconfiguring VM instance instance-0000001c to attach disk [datastore1] 8af5ca34-f9c4-4b68-b0f7-7163f388b778/8af5ca34-f9c4-4b68-b0f7-7163f388b778.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 817.218793] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1c8653f4-85ad-4b08-9dd9-3db407bd6685 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.238054] env[62627]: DEBUG oslo_vmware.api [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Waiting for the task: (returnval){ [ 817.238054] env[62627]: value = "task-2194202" [ 817.238054] env[62627]: _type = "Task" [ 817.238054] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.246414] env[62627]: DEBUG oslo_vmware.api [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Task: {'id': task-2194202, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.393882] env[62627]: DEBUG nova.compute.manager [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] [instance: 95ac4fbf-071a-403e-8cae-88d9899757ba] Start spawning the instance on the hypervisor. {{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 817.420521] env[62627]: DEBUG nova.virt.hardware [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 817.420769] env[62627]: DEBUG nova.virt.hardware [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 817.420936] env[62627]: DEBUG nova.virt.hardware [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 817.421139] env[62627]: DEBUG nova.virt.hardware [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 817.421286] env[62627]: DEBUG nova.virt.hardware [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 817.421463] env[62627]: DEBUG nova.virt.hardware [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 817.421684] env[62627]: DEBUG nova.virt.hardware [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 817.421845] env[62627]: DEBUG nova.virt.hardware [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 817.422036] env[62627]: DEBUG nova.virt.hardware [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 817.422211] env[62627]: DEBUG nova.virt.hardware [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 817.422372] env[62627]: DEBUG nova.virt.hardware [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 817.423300] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdbda099-44b2-4860-91eb-520d8d22d692 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.431751] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8dab6dc-da78-44e8-8b14-1b4f35927754 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.505390] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194201, 'name': CreateVM_Task, 'duration_secs': 0.342791} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.506264] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ea289de9-9b2d-4756-a9a5-0e3dfe8db772] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 817.506345] env[62627]: DEBUG oslo_concurrency.lockutils [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 817.507199] env[62627]: DEBUG oslo_concurrency.lockutils [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 817.507199] env[62627]: DEBUG oslo_concurrency.lockutils [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 817.507199] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-554dca6d-19aa-4ad5-80ed-c82db74eb959 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.511732] env[62627]: DEBUG oslo_vmware.api [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 817.511732] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]529b2c88-fd1c-700b-a473-b55990c658b1" [ 817.511732] env[62627]: _type = "Task" [ 817.511732] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.519296] env[62627]: DEBUG oslo_vmware.api [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]529b2c88-fd1c-700b-a473-b55990c658b1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.614201] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ddd0248c-dc8d-4154-a2a1-aa2a8e3b3e61 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.244s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 817.616311] env[62627]: DEBUG oslo_concurrency.lockutils [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.593s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 817.617823] env[62627]: INFO nova.compute.claims [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] [instance: cbd30c37-f6c3-4229-8b95-855cab64924c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 817.645904] env[62627]: INFO nova.scheduler.client.report [None req-ddd0248c-dc8d-4154-a2a1-aa2a8e3b3e61 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Deleted allocations for instance 63e884c4-2cc1-431d-8a7f-1c4259b29e1a [ 817.703115] env[62627]: DEBUG nova.network.neutron [req-f5be4d76-edb8-481c-b1e9-9785473f70a5 req-711c46ba-9224-4a00-8810-e2008ca6c403 service nova] [instance: ea289de9-9b2d-4756-a9a5-0e3dfe8db772] Updated VIF entry in instance network info cache for port fc1db27a-489e-4bc7-a90d-1af91c74e4fd. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 817.703435] env[62627]: DEBUG nova.network.neutron [req-f5be4d76-edb8-481c-b1e9-9785473f70a5 req-711c46ba-9224-4a00-8810-e2008ca6c403 service nova] [instance: ea289de9-9b2d-4756-a9a5-0e3dfe8db772] Updating instance_info_cache with network_info: [{"id": "fc1db27a-489e-4bc7-a90d-1af91c74e4fd", "address": "fa:16:3e:7c:a5:a2", "network": {"id": "7c6d5a35-da58-4a44-83d0-20feb994e5ab", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-709440062-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c9263b20f714274a55b88a8bdbd5251", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "99be9a5e-b3f9-4e6c-83d5-df11f817847d", "external-id": "nsx-vlan-transportzone-566", "segmentation_id": 566, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc1db27a-48", "ovs_interfaceid": "fc1db27a-489e-4bc7-a90d-1af91c74e4fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 817.749215] env[62627]: DEBUG oslo_vmware.api [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Task: {'id': task-2194202, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.945314] env[62627]: DEBUG nova.compute.manager [req-7ddad513-11b7-406c-a6be-962301c16dc6 req-c61b41dc-292b-46aa-850b-d918ec5d2eb3 service nova] [instance: 95ac4fbf-071a-403e-8cae-88d9899757ba] Received event network-vif-plugged-d126d7bd-d076-4411-8929-af4270455b6d {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 817.946023] env[62627]: DEBUG oslo_concurrency.lockutils [req-7ddad513-11b7-406c-a6be-962301c16dc6 req-c61b41dc-292b-46aa-850b-d918ec5d2eb3 service nova] Acquiring lock "95ac4fbf-071a-403e-8cae-88d9899757ba-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 817.946023] env[62627]: DEBUG oslo_concurrency.lockutils [req-7ddad513-11b7-406c-a6be-962301c16dc6 req-c61b41dc-292b-46aa-850b-d918ec5d2eb3 service nova] Lock "95ac4fbf-071a-403e-8cae-88d9899757ba-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 817.946023] env[62627]: DEBUG oslo_concurrency.lockutils [req-7ddad513-11b7-406c-a6be-962301c16dc6 req-c61b41dc-292b-46aa-850b-d918ec5d2eb3 service nova] Lock "95ac4fbf-071a-403e-8cae-88d9899757ba-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 817.950018] env[62627]: DEBUG nova.compute.manager [req-7ddad513-11b7-406c-a6be-962301c16dc6 req-c61b41dc-292b-46aa-850b-d918ec5d2eb3 service nova] [instance: 95ac4fbf-071a-403e-8cae-88d9899757ba] No waiting events found dispatching network-vif-plugged-d126d7bd-d076-4411-8929-af4270455b6d {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 817.950018] env[62627]: WARNING nova.compute.manager [req-7ddad513-11b7-406c-a6be-962301c16dc6 req-c61b41dc-292b-46aa-850b-d918ec5d2eb3 service nova] [instance: 95ac4fbf-071a-403e-8cae-88d9899757ba] Received unexpected event network-vif-plugged-d126d7bd-d076-4411-8929-af4270455b6d for instance with vm_state building and task_state spawning. [ 818.022088] env[62627]: DEBUG oslo_vmware.api [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]529b2c88-fd1c-700b-a473-b55990c658b1, 'name': SearchDatastore_Task, 'duration_secs': 0.046278} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.022389] env[62627]: DEBUG oslo_concurrency.lockutils [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 818.022626] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: ea289de9-9b2d-4756-a9a5-0e3dfe8db772] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 818.022847] env[62627]: DEBUG oslo_concurrency.lockutils [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 818.023060] env[62627]: DEBUG oslo_concurrency.lockutils [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 818.023178] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 818.023466] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-96629591-9d41-4b40-a36c-c09daef5d4c4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.036908] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 818.037111] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 818.037835] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64371565-137e-44e9-ba04-485308329474 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.042747] env[62627]: DEBUG oslo_vmware.api [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 818.042747] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52e0f5f0-3458-51a4-04cf-46669f6432e5" [ 818.042747] env[62627]: _type = "Task" [ 818.042747] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.050194] env[62627]: DEBUG oslo_vmware.api [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52e0f5f0-3458-51a4-04cf-46669f6432e5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.070670] env[62627]: DEBUG nova.network.neutron [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] [instance: 95ac4fbf-071a-403e-8cae-88d9899757ba] Successfully updated port: d126d7bd-d076-4411-8929-af4270455b6d {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 818.156116] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ddd0248c-dc8d-4154-a2a1-aa2a8e3b3e61 tempest-ServersAdmin275Test-2143909398 tempest-ServersAdmin275Test-2143909398-project-member] Lock "63e884c4-2cc1-431d-8a7f-1c4259b29e1a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.791s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 818.205905] env[62627]: DEBUG oslo_concurrency.lockutils [req-f5be4d76-edb8-481c-b1e9-9785473f70a5 req-711c46ba-9224-4a00-8810-e2008ca6c403 service nova] Releasing lock "refresh_cache-ea289de9-9b2d-4756-a9a5-0e3dfe8db772" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 818.249023] env[62627]: DEBUG oslo_vmware.api [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Task: {'id': task-2194202, 'name': ReconfigVM_Task, 'duration_secs': 0.514036} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.249308] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Reconfigured VM instance instance-0000001c to attach disk [datastore1] 8af5ca34-f9c4-4b68-b0f7-7163f388b778/8af5ca34-f9c4-4b68-b0f7-7163f388b778.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 818.249990] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a14b53e0-005d-43bf-a827-a258da85a69f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.256764] env[62627]: DEBUG oslo_vmware.api [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Waiting for the task: (returnval){ [ 818.256764] env[62627]: value = "task-2194203" [ 818.256764] env[62627]: _type = "Task" [ 818.256764] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.265818] env[62627]: DEBUG oslo_vmware.api [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Task: {'id': task-2194203, 'name': Rename_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.553661] env[62627]: DEBUG oslo_vmware.api [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52e0f5f0-3458-51a4-04cf-46669f6432e5, 'name': SearchDatastore_Task, 'duration_secs': 0.008483} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.554845] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7cb22042-0b60-409c-b755-25b475affc57 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.560277] env[62627]: DEBUG oslo_vmware.api [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 818.560277] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d15b0e-0a25-2f79-0e46-63d4ad9c0ec7" [ 818.560277] env[62627]: _type = "Task" [ 818.560277] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.568342] env[62627]: DEBUG oslo_vmware.api [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d15b0e-0a25-2f79-0e46-63d4ad9c0ec7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.574618] env[62627]: DEBUG oslo_concurrency.lockutils [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Acquiring lock "refresh_cache-95ac4fbf-071a-403e-8cae-88d9899757ba" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 818.574768] env[62627]: DEBUG oslo_concurrency.lockutils [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Acquired lock "refresh_cache-95ac4fbf-071a-403e-8cae-88d9899757ba" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 818.574963] env[62627]: DEBUG nova.network.neutron [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] [instance: 95ac4fbf-071a-403e-8cae-88d9899757ba] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 818.771540] env[62627]: DEBUG oslo_vmware.api [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Task: {'id': task-2194203, 'name': Rename_Task, 'duration_secs': 0.143392} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.773964] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 818.774394] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1ecb1c69-c834-4c01-86de-e66a27192d81 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.781229] env[62627]: DEBUG oslo_vmware.api [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Waiting for the task: (returnval){ [ 818.781229] env[62627]: value = "task-2194204" [ 818.781229] env[62627]: _type = "Task" [ 818.781229] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.798775] env[62627]: DEBUG oslo_vmware.api [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Task: {'id': task-2194204, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.072614] env[62627]: DEBUG oslo_vmware.api [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d15b0e-0a25-2f79-0e46-63d4ad9c0ec7, 'name': SearchDatastore_Task, 'duration_secs': 0.009399} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.072872] env[62627]: DEBUG oslo_concurrency.lockutils [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 819.073140] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] ea289de9-9b2d-4756-a9a5-0e3dfe8db772/ea289de9-9b2d-4756-a9a5-0e3dfe8db772.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 819.073413] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8f910e22-c64c-47ba-b3e6-8eafca8b5290 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.083152] env[62627]: DEBUG oslo_vmware.api [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 819.083152] env[62627]: value = "task-2194205" [ 819.083152] env[62627]: _type = "Task" [ 819.083152] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.091324] env[62627]: DEBUG oslo_vmware.api [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194205, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.115328] env[62627]: DEBUG nova.network.neutron [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] [instance: 95ac4fbf-071a-403e-8cae-88d9899757ba] Instance cache missing network info. 
{{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 819.141181] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c789b612-34f0-45aa-809c-0f19120811d3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.149244] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc0cfcec-8d0f-41e1-9828-b4f9e2c98b3d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.184043] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae05475b-0727-4c2d-89ea-fa01beccf574 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.193598] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8971721-b6a2-4245-b27f-86c60bd4726c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.209555] env[62627]: DEBUG nova.compute.provider_tree [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 819.292668] env[62627]: DEBUG oslo_vmware.api [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Task: {'id': task-2194204, 'name': PowerOnVM_Task, 'duration_secs': 0.416771} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.293404] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 819.293404] env[62627]: INFO nova.compute.manager [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Took 8.36 seconds to spawn the instance on the hypervisor. 
[ 819.293404] env[62627]: DEBUG nova.compute.manager [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 819.294240] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-566eee41-1036-47fe-9a16-75d6618d7f48 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.298260] env[62627]: DEBUG nova.network.neutron [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] [instance: 95ac4fbf-071a-403e-8cae-88d9899757ba] Updating instance_info_cache with network_info: [{"id": "d126d7bd-d076-4411-8929-af4270455b6d", "address": "fa:16:3e:fa:e0:12", "network": {"id": "2a762f9b-02b6-4395-a13e-478e9168cbbe", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1933862077-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca5af160d3f145c28f348d6148af4db8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15165046-2de9-4ada-9e99-0126e20854a9", "external-id": "nsx-vlan-transportzone-974", "segmentation_id": 974, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd126d7bd-d0", "ovs_interfaceid": "d126d7bd-d076-4411-8929-af4270455b6d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 819.595104] env[62627]: DEBUG oslo_vmware.api [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194205, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.481088} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.595682] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] ea289de9-9b2d-4756-a9a5-0e3dfe8db772/ea289de9-9b2d-4756-a9a5-0e3dfe8db772.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 819.596129] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: ea289de9-9b2d-4756-a9a5-0e3dfe8db772] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 819.596463] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8075c745-5105-4e08-89c9-4c3af0bd6afa {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.604160] env[62627]: DEBUG oslo_vmware.api [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 819.604160] env[62627]: value = "task-2194206" [ 819.604160] env[62627]: _type = "Task" [ 819.604160] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.610905] env[62627]: DEBUG oslo_vmware.api [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194206, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.712723] env[62627]: DEBUG nova.scheduler.client.report [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 819.800634] env[62627]: DEBUG oslo_concurrency.lockutils [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Releasing lock "refresh_cache-95ac4fbf-071a-403e-8cae-88d9899757ba" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 819.800996] env[62627]: DEBUG nova.compute.manager [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] [instance: 95ac4fbf-071a-403e-8cae-88d9899757ba] Instance network_info: |[{"id": "d126d7bd-d076-4411-8929-af4270455b6d", "address": "fa:16:3e:fa:e0:12", "network": {"id": "2a762f9b-02b6-4395-a13e-478e9168cbbe", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1933862077-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca5af160d3f145c28f348d6148af4db8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15165046-2de9-4ada-9e99-0126e20854a9", "external-id": "nsx-vlan-transportzone-974", "segmentation_id": 974, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd126d7bd-d0", "ovs_interfaceid": "d126d7bd-d076-4411-8929-af4270455b6d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 819.801480] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] [instance: 95ac4fbf-071a-403e-8cae-88d9899757ba] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fa:e0:12', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '15165046-2de9-4ada-9e99-0126e20854a9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd126d7bd-d076-4411-8929-af4270455b6d', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 819.812706] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Creating folder: Project (ca5af160d3f145c28f348d6148af4db8). Parent ref: group-v447541. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 819.823407] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-671a5ba0-85f7-46db-9e05-c9611351e612 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.830058] env[62627]: INFO nova.compute.manager [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Took 37.80 seconds to build instance. [ 819.840232] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Created folder: Project (ca5af160d3f145c28f348d6148af4db8) in parent group-v447541. [ 819.840232] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Creating folder: Instances. Parent ref: group-v447623. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 819.840839] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-13aa2d4c-3cc2-4da9-9e51-5b7b51d4a4b2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.850580] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Created folder: Instances in parent group-v447623. [ 819.850580] env[62627]: DEBUG oslo.service.loopingcall [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 819.850746] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 95ac4fbf-071a-403e-8cae-88d9899757ba] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 819.851048] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6343904d-4ed9-450f-9841-ada0260d03a0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.871482] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 819.871482] env[62627]: value = "task-2194209" [ 819.871482] env[62627]: _type = "Task" [ 819.871482] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.880044] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194209, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.112417] env[62627]: DEBUG oslo_vmware.api [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194206, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.060186} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.112643] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: ea289de9-9b2d-4756-a9a5-0e3dfe8db772] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 820.113525] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f07ef70-4096-4de3-b4ce-ec45fb3eddbb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.138100] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: ea289de9-9b2d-4756-a9a5-0e3dfe8db772] Reconfiguring VM instance instance-0000001d to attach disk [datastore2] ea289de9-9b2d-4756-a9a5-0e3dfe8db772/ea289de9-9b2d-4756-a9a5-0e3dfe8db772.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 820.138404] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7e749239-4f69-4077-a4b6-8bb1974e6703 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.160450] env[62627]: DEBUG oslo_vmware.api [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 820.160450] env[62627]: value = "task-2194210" [ 820.160450] env[62627]: _type = "Task" [ 820.160450] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.169471] env[62627]: DEBUG oslo_vmware.api [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194210, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.215764] env[62627]: DEBUG nova.compute.manager [req-ed001d86-8ce9-4c3d-8714-cfca9a42d48c req-8c8d8caf-f493-4083-8551-56c24cc0bff9 service nova] [instance: 95ac4fbf-071a-403e-8cae-88d9899757ba] Received event network-changed-d126d7bd-d076-4411-8929-af4270455b6d {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 820.216058] env[62627]: DEBUG nova.compute.manager [req-ed001d86-8ce9-4c3d-8714-cfca9a42d48c req-8c8d8caf-f493-4083-8551-56c24cc0bff9 service nova] [instance: 95ac4fbf-071a-403e-8cae-88d9899757ba] Refreshing instance network info cache due to event network-changed-d126d7bd-d076-4411-8929-af4270455b6d. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 820.218465] env[62627]: DEBUG oslo_concurrency.lockutils [req-ed001d86-8ce9-4c3d-8714-cfca9a42d48c req-8c8d8caf-f493-4083-8551-56c24cc0bff9 service nova] Acquiring lock "refresh_cache-95ac4fbf-071a-403e-8cae-88d9899757ba" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 820.218465] env[62627]: DEBUG oslo_concurrency.lockutils [req-ed001d86-8ce9-4c3d-8714-cfca9a42d48c req-8c8d8caf-f493-4083-8551-56c24cc0bff9 service nova] Acquired lock "refresh_cache-95ac4fbf-071a-403e-8cae-88d9899757ba" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 820.218465] env[62627]: DEBUG nova.network.neutron [req-ed001d86-8ce9-4c3d-8714-cfca9a42d48c req-8c8d8caf-f493-4083-8551-56c24cc0bff9 service nova] [instance: 95ac4fbf-071a-403e-8cae-88d9899757ba] Refreshing network info cache for port d126d7bd-d076-4411-8929-af4270455b6d {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 820.220242] env[62627]: DEBUG oslo_concurrency.lockutils [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.604s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 820.224027] env[62627]: DEBUG nova.compute.manager [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] [instance: cbd30c37-f6c3-4229-8b95-855cab64924c] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 820.224027] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8a65011e-78d9-423f-b028-9afc812ebe7c tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.009s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 820.224027] env[62627]: DEBUG nova.objects.instance [None req-8a65011e-78d9-423f-b028-9afc812ebe7c tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Lazy-loading 'resources' on Instance uuid 15799515-ab7d-4e7c-a6e0-c740baf4f93a {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 820.334812] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a76a3021-e9f2-477c-ad21-6987fce750ab tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Lock "8af5ca34-f9c4-4b68-b0f7-7163f388b778" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 68.020s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 820.385027] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194209, 'name': CreateVM_Task, 'duration_secs': 0.347175} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.385218] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 95ac4fbf-071a-403e-8cae-88d9899757ba] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 820.385886] env[62627]: DEBUG oslo_concurrency.lockutils [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 820.386080] env[62627]: DEBUG oslo_concurrency.lockutils [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 820.386415] env[62627]: DEBUG oslo_concurrency.lockutils [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 820.386660] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a9a65859-1d1c-4b82-a883-79ece22b2e04 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.391927] env[62627]: DEBUG oslo_vmware.api [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Waiting for the task: (returnval){ [ 820.391927] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52dae517-7e40-2e0d-9fc3-2d15e6e4bf14" [ 820.391927] env[62627]: _type = "Task" [ 820.391927] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.402384] env[62627]: DEBUG oslo_vmware.api [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52dae517-7e40-2e0d-9fc3-2d15e6e4bf14, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.672698] env[62627]: DEBUG oslo_vmware.api [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194210, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.730378] env[62627]: DEBUG nova.compute.utils [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 820.735223] env[62627]: DEBUG nova.compute.manager [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] [instance: cbd30c37-f6c3-4229-8b95-855cab64924c] Allocating IP information in the background. {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 820.735223] env[62627]: DEBUG nova.network.neutron [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] [instance: cbd30c37-f6c3-4229-8b95-855cab64924c] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 820.837068] env[62627]: DEBUG nova.compute.manager [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] [instance: 5f0240da-bec1-4bf3-a975-6ed3d0630a2d] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 820.878771] env[62627]: DEBUG nova.policy [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ccb8d9c080cb4714a3de5547a2dbd753', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0f0d1cf389f041c690185c2b2399855e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 820.916373] env[62627]: DEBUG oslo_vmware.api [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52dae517-7e40-2e0d-9fc3-2d15e6e4bf14, 'name': SearchDatastore_Task, 'duration_secs': 0.050216} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.916373] env[62627]: DEBUG oslo_concurrency.lockutils [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 820.916373] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] [instance: 95ac4fbf-071a-403e-8cae-88d9899757ba] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 820.916373] env[62627]: DEBUG oslo_concurrency.lockutils [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 820.916587] env[62627]: DEBUG oslo_concurrency.lockutils [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 820.916587] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 820.916587] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a848a860-ea62-4cf6-8901-39f67fd4428a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.927230] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 820.927420] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 820.928577] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-45112774-318b-4c33-93ef-c07faf15591c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.936533] env[62627]: DEBUG oslo_vmware.api [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Waiting for the task: (returnval){ [ 820.936533] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5298dec9-22da-e829-5fa6-d283b6082a2f" [ 820.936533] env[62627]: _type = "Task" [ 820.936533] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.944128] env[62627]: DEBUG oslo_vmware.api [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5298dec9-22da-e829-5fa6-d283b6082a2f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.105259] env[62627]: DEBUG nova.network.neutron [req-ed001d86-8ce9-4c3d-8714-cfca9a42d48c req-8c8d8caf-f493-4083-8551-56c24cc0bff9 service nova] [instance: 95ac4fbf-071a-403e-8cae-88d9899757ba] Updated VIF entry in instance network info cache for port d126d7bd-d076-4411-8929-af4270455b6d. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 821.105259] env[62627]: DEBUG nova.network.neutron [req-ed001d86-8ce9-4c3d-8714-cfca9a42d48c req-8c8d8caf-f493-4083-8551-56c24cc0bff9 service nova] [instance: 95ac4fbf-071a-403e-8cae-88d9899757ba] Updating instance_info_cache with network_info: [{"id": "d126d7bd-d076-4411-8929-af4270455b6d", "address": "fa:16:3e:fa:e0:12", "network": {"id": "2a762f9b-02b6-4395-a13e-478e9168cbbe", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1933862077-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca5af160d3f145c28f348d6148af4db8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15165046-2de9-4ada-9e99-0126e20854a9", "external-id": "nsx-vlan-transportzone-974", "segmentation_id": 974, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd126d7bd-d0", "ovs_interfaceid": "d126d7bd-d076-4411-8929-af4270455b6d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 821.179051] env[62627]: DEBUG oslo_vmware.api [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194210, 'name': ReconfigVM_Task, 'duration_secs': 0.864906} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.179051] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: ea289de9-9b2d-4756-a9a5-0e3dfe8db772] Reconfigured VM instance instance-0000001d to attach disk [datastore2] ea289de9-9b2d-4756-a9a5-0e3dfe8db772/ea289de9-9b2d-4756-a9a5-0e3dfe8db772.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 821.179226] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2d5e62c3-a837-4ec1-beca-7d4b3bccfb22 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.190189] env[62627]: DEBUG oslo_vmware.api [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 821.190189] env[62627]: value = "task-2194211" [ 821.190189] env[62627]: _type = "Task" [ 821.190189] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.200379] env[62627]: DEBUG oslo_vmware.api [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194211, 'name': Rename_Task} progress is 6%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.235788] env[62627]: DEBUG nova.compute.manager [None req-0f2d6fcd-c542-44b2-a5fb-1bfd349ffe43 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 821.236317] env[62627]: DEBUG nova.compute.manager [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] [instance: cbd30c37-f6c3-4229-8b95-855cab64924c] Start building block device mappings for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 821.239695] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eebdeab5-0c93-4fbb-ac6e-7314e3e597b7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.357186] env[62627]: DEBUG nova.network.neutron [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] [instance: cbd30c37-f6c3-4229-8b95-855cab64924c] Successfully created port: 3b9cf75c-dd86-44d1-9123-1680cabc589d {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 821.366052] env[62627]: DEBUG oslo_concurrency.lockutils [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 821.451007] env[62627]: DEBUG oslo_vmware.api [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5298dec9-22da-e829-5fa6-d283b6082a2f, 'name': SearchDatastore_Task, 'duration_secs': 0.0092} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.452140] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e7bb2b00-07e0-49e0-a779-8734ecf35773 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.464735] env[62627]: DEBUG oslo_vmware.api [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Waiting for the task: (returnval){ [ 821.464735] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]522159c4-76b5-6e81-ccd2-bdce8e5bef08" [ 821.464735] env[62627]: _type = "Task" [ 821.464735] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.474396] env[62627]: DEBUG oslo_vmware.api [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]522159c4-76b5-6e81-ccd2-bdce8e5bef08, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.511191] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b45c3f63-5b4a-4940-9483-8422c3c0a0e0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.518868] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92c01d93-604d-4f26-bb73-2ec51c4db791 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.552591] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ffdb714-36ed-489e-ad2d-aff630df2ace {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.560112] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f75d62d8-f113-44fd-a13e-75951be8680f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.574239] env[62627]: DEBUG nova.compute.provider_tree [None req-8a65011e-78d9-423f-b028-9afc812ebe7c tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Updating inventory in ProviderTree for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 821.607260] env[62627]: DEBUG oslo_concurrency.lockutils [req-ed001d86-8ce9-4c3d-8714-cfca9a42d48c req-8c8d8caf-f493-4083-8551-56c24cc0bff9 service nova] Releasing lock "refresh_cache-95ac4fbf-071a-403e-8cae-88d9899757ba" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 821.706037] env[62627]: DEBUG oslo_vmware.api [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194211, 'name': Rename_Task, 'duration_secs': 0.323478} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.706037] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: ea289de9-9b2d-4756-a9a5-0e3dfe8db772] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 821.706037] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-76f97a49-5558-4fcc-b293-b476b612c704 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.709809] env[62627]: DEBUG oslo_vmware.api [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 821.709809] env[62627]: value = "task-2194212" [ 821.709809] env[62627]: _type = "Task" [ 821.709809] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.722309] env[62627]: DEBUG oslo_vmware.api [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194212, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.768136] env[62627]: INFO nova.compute.manager [None req-0f2d6fcd-c542-44b2-a5fb-1bfd349ffe43 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] instance snapshotting [ 821.771204] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e479d35a-850a-4955-84e8-2af41c677d1a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.792755] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d51692b-7743-438e-be8c-7787fbd3e549 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.978105] env[62627]: DEBUG oslo_vmware.api [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]522159c4-76b5-6e81-ccd2-bdce8e5bef08, 'name': SearchDatastore_Task, 'duration_secs': 0.017335} completed successfully. 
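Editorial note: the repeated "Waiting for the task ... to complete", "progress is N%", and "completed successfully ... duration_secs" entries above are produced by oslo_vmware's task polling. Below is a minimal, self-contained sketch of that poll-until-done pattern; it is illustrative only, not oslo_vmware's actual implementation, and the get_task_info callback, its return shape, and the task states are simplified stand-ins.

    import time

    class TaskFailed(Exception):
        pass

    def wait_for_task(get_task_info, poll_interval=0.5, timeout=300):
        """Poll a vCenter-style task until it reaches a terminal state.

        get_task_info is a caller-supplied function returning a dict such as
        {'state': 'running', 'progress': 40}, {'state': 'success'}, or
        {'state': 'error', 'error': '...'} -- a simplified stand-in for the
        TaskInfo object a real vSphere API call would return.
        """
        start = time.monotonic()
        while True:
            info = get_task_info()
            state = info.get('state')
            if state == 'success':
                return time.monotonic() - start      # duration_secs in the log
            if state == 'error':
                raise TaskFailed(info.get('error', 'task failed'))
            if time.monotonic() - start > timeout:
                raise TaskFailed('timed out waiting for task')
            # Equivalent to the "progress is N%" debug lines in the log.
            print('progress is %s%%' % info.get('progress', 0))
            time.sleep(poll_interval)

    # Example: a fake task that finishes on the third poll.
    _polls = {'n': 0}
    def fake_task_info():
        _polls['n'] += 1
        if _polls['n'] >= 3:
            return {'state': 'success'}
        return {'state': 'running', 'progress': 33 * _polls['n']}

    if __name__ == '__main__':
        print('duration_secs=%.3f' % wait_for_task(fake_task_info, poll_interval=0.01))
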
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.978397] env[62627]: DEBUG oslo_concurrency.lockutils [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 821.978673] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 95ac4fbf-071a-403e-8cae-88d9899757ba/95ac4fbf-071a-403e-8cae-88d9899757ba.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 821.978950] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-53f3dbad-da77-4867-a328-caa52b5e290c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.987561] env[62627]: DEBUG oslo_vmware.api [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Waiting for the task: (returnval){ [ 821.987561] env[62627]: value = "task-2194213" [ 821.987561] env[62627]: _type = "Task" [ 821.987561] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.998206] env[62627]: DEBUG oslo_vmware.api [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Task: {'id': task-2194213, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.106559] env[62627]: ERROR nova.scheduler.client.report [None req-8a65011e-78d9-423f-b028-9afc812ebe7c tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [req-1408ddcc-7b63-4764-bddf-d2c728c6229e] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c17e7f32-68d8-4e2f-8979-15d701de02f7. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-1408ddcc-7b63-4764-bddf-d2c728c6229e"}]} [ 822.129021] env[62627]: DEBUG nova.scheduler.client.report [None req-8a65011e-78d9-423f-b028-9afc812ebe7c tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Refreshing inventories for resource provider c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 822.139243] env[62627]: DEBUG nova.scheduler.client.report [None req-8a65011e-78d9-423f-b028-9afc812ebe7c tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Updating ProviderTree inventory for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 822.139547] env[62627]: DEBUG nova.compute.provider_tree [None req-8a65011e-78d9-423f-b028-9afc812ebe7c tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Updating inventory in ProviderTree for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 822.151507] env[62627]: DEBUG nova.scheduler.client.report [None req-8a65011e-78d9-423f-b028-9afc812ebe7c tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Refreshing aggregate associations for resource provider c17e7f32-68d8-4e2f-8979-15d701de02f7, aggregates: None {{(pid=62627) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 822.171409] env[62627]: DEBUG nova.scheduler.client.report [None req-8a65011e-78d9-423f-b028-9afc812ebe7c tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Refreshing trait associations for resource provider c17e7f32-68d8-4e2f-8979-15d701de02f7, traits: COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62627) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 822.219932] env[62627]: DEBUG oslo_vmware.api [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194212, 'name': PowerOnVM_Task, 'duration_secs': 0.456755} completed successfully. 
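Editorial note: the 409 "placement.concurrent_update" error above, immediately followed by "Refreshing inventories for resource provider ...", is Placement's generation-based optimistic concurrency control: every inventory update carries the resource provider generation, a stale generation is rejected with 409, and the client re-reads the provider and retries. A minimal sketch of that retry loop follows; get_provider and put_inventories are hypothetical helpers standing in for the real Placement HTTP calls, not the nova.scheduler.client.report API.

    class Conflict(Exception):
        """Stand-in for a 409 placement.concurrent_update response."""

    def update_inventory_with_retry(get_provider, put_inventories, inventory,
                                    max_attempts=4):
        """Retry an inventory update when the provider generation is stale."""
        for _ in range(max_attempts):
            generation = get_provider()['generation']
            try:
                put_inventories(generation, inventory)
                return True
            except Conflict:
                # Someone else updated the provider first; refresh and retry.
                continue
        return False

    # The inventory payload mirrors the structure logged above.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
                 'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                      'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176,
                    'step_size': 1, 'allocation_ratio': 1.0},
    }

    # Tiny in-memory demo of the generation check.
    _state = {'generation': 7}
    def get_provider():
        return dict(_state)
    def put_inventories(generation, inv):
        if generation != _state['generation']:
            raise Conflict()
        _state['generation'] += 1

    print(update_inventory_with_retry(get_provider, put_inventories, inventory))
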
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.223280] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: ea289de9-9b2d-4756-a9a5-0e3dfe8db772] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 822.223280] env[62627]: INFO nova.compute.manager [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: ea289de9-9b2d-4756-a9a5-0e3dfe8db772] Took 8.52 seconds to spawn the instance on the hypervisor. [ 822.223280] env[62627]: DEBUG nova.compute.manager [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: ea289de9-9b2d-4756-a9a5-0e3dfe8db772] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 822.225155] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b0d39db-ccc9-443b-830e-0602b3fd6abf {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.261896] env[62627]: DEBUG nova.compute.manager [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] [instance: cbd30c37-f6c3-4229-8b95-855cab64924c] Start spawning the instance on the hypervisor. {{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 822.294599] env[62627]: DEBUG nova.virt.hardware [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 822.294855] env[62627]: DEBUG nova.virt.hardware [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 822.295017] env[62627]: DEBUG nova.virt.hardware [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 822.295200] env[62627]: DEBUG nova.virt.hardware [None 
req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 822.295345] env[62627]: DEBUG nova.virt.hardware [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 822.295493] env[62627]: DEBUG nova.virt.hardware [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 822.295704] env[62627]: DEBUG nova.virt.hardware [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 822.295867] env[62627]: DEBUG nova.virt.hardware [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 822.296161] env[62627]: DEBUG nova.virt.hardware [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 822.296302] env[62627]: DEBUG nova.virt.hardware [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 822.296474] env[62627]: DEBUG nova.virt.hardware [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 822.297719] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1664cbc-3f69-41b8-87bd-7a2958bdcf2d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.307650] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-0f2d6fcd-c542-44b2-a5fb-1bfd349ffe43 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Creating Snapshot of the VM instance {{(pid=62627) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 822.307996] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with 
opID=oslo.vmware-04cccbc2-b227-4c65-9f2f-c3d3ef51b568 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.314490] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e02c8b0-f44d-41b1-95df-6680d877d656 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.323792] env[62627]: DEBUG oslo_vmware.api [None req-0f2d6fcd-c542-44b2-a5fb-1bfd349ffe43 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Waiting for the task: (returnval){ [ 822.323792] env[62627]: value = "task-2194214" [ 822.323792] env[62627]: _type = "Task" [ 822.323792] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.345564] env[62627]: DEBUG oslo_vmware.api [None req-0f2d6fcd-c542-44b2-a5fb-1bfd349ffe43 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Task: {'id': task-2194214, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.504093] env[62627]: DEBUG oslo_vmware.api [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Task: {'id': task-2194213, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.513304} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.504093] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 95ac4fbf-071a-403e-8cae-88d9899757ba/95ac4fbf-071a-403e-8cae-88d9899757ba.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 822.504356] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] [instance: 95ac4fbf-071a-403e-8cae-88d9899757ba] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 822.506347] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-080d661c-922a-4239-b394-428a353e5b4d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.515381] env[62627]: DEBUG oslo_vmware.api [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Waiting for the task: (returnval){ [ 822.515381] env[62627]: value = "task-2194215" [ 822.515381] env[62627]: _type = "Task" [ 822.515381] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.528207] env[62627]: DEBUG oslo_vmware.api [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Task: {'id': task-2194215, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.729456] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Acquiring lock "c9088ac9-456b-4c92-8773-79c263e7cc2a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 822.729744] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Lock "c9088ac9-456b-4c92-8773-79c263e7cc2a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 822.749253] env[62627]: INFO nova.compute.manager [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: ea289de9-9b2d-4756-a9a5-0e3dfe8db772] Took 39.66 seconds to build instance. [ 822.835344] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c22ae13d-3d86-42af-b305-e2dce87b346e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.846082] env[62627]: DEBUG oslo_vmware.api [None req-0f2d6fcd-c542-44b2-a5fb-1bfd349ffe43 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Task: {'id': task-2194214, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.849366] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1fbfbbb-71ac-48da-8677-39c2dab77fbd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.886275] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-353abdc5-ce44-4533-ad5d-4c2e2b81a844 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.894374] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd8c8abf-e463-4e7f-b96e-6acec49f341e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.909717] env[62627]: DEBUG nova.compute.provider_tree [None req-8a65011e-78d9-423f-b028-9afc812ebe7c tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 822.921040] env[62627]: DEBUG nova.compute.manager [req-7190c9c9-de80-4a24-b53b-95b2b1227d5e req-17643e43-995f-4f23-8593-fec3b10b28c8 service nova] [instance: cbd30c37-f6c3-4229-8b95-855cab64924c] Received event network-vif-plugged-3b9cf75c-dd86-44d1-9123-1680cabc589d {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 822.921262] env[62627]: DEBUG oslo_concurrency.lockutils [req-7190c9c9-de80-4a24-b53b-95b2b1227d5e req-17643e43-995f-4f23-8593-fec3b10b28c8 service nova] Acquiring lock "cbd30c37-f6c3-4229-8b95-855cab64924c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 822.921466] env[62627]: DEBUG oslo_concurrency.lockutils [req-7190c9c9-de80-4a24-b53b-95b2b1227d5e req-17643e43-995f-4f23-8593-fec3b10b28c8 service nova] Lock "cbd30c37-f6c3-4229-8b95-855cab64924c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 822.921634] env[62627]: DEBUG oslo_concurrency.lockutils [req-7190c9c9-de80-4a24-b53b-95b2b1227d5e req-17643e43-995f-4f23-8593-fec3b10b28c8 service nova] Lock "cbd30c37-f6c3-4229-8b95-855cab64924c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 822.921799] env[62627]: DEBUG nova.compute.manager [req-7190c9c9-de80-4a24-b53b-95b2b1227d5e req-17643e43-995f-4f23-8593-fec3b10b28c8 service nova] [instance: cbd30c37-f6c3-4229-8b95-855cab64924c] No waiting events found dispatching network-vif-plugged-3b9cf75c-dd86-44d1-9123-1680cabc589d {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 822.921958] env[62627]: WARNING nova.compute.manager [req-7190c9c9-de80-4a24-b53b-95b2b1227d5e req-17643e43-995f-4f23-8593-fec3b10b28c8 service nova] [instance: cbd30c37-f6c3-4229-8b95-855cab64924c] Received unexpected event network-vif-plugged-3b9cf75c-dd86-44d1-9123-1680cabc589d for instance with 
vm_state building and task_state spawning. [ 823.028562] env[62627]: DEBUG oslo_vmware.api [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Task: {'id': task-2194215, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067086} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.028830] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] [instance: 95ac4fbf-071a-403e-8cae-88d9899757ba] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 823.030214] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb70f228-afa2-4784-a8bd-b3150885a152 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.053643] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] [instance: 95ac4fbf-071a-403e-8cae-88d9899757ba] Reconfiguring VM instance instance-0000001e to attach disk [datastore2] 95ac4fbf-071a-403e-8cae-88d9899757ba/95ac4fbf-071a-403e-8cae-88d9899757ba.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 823.054475] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c1040238-dfd6-43f5-ade2-840ba51d82be {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.069354] env[62627]: DEBUG nova.network.neutron [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] [instance: cbd30c37-f6c3-4229-8b95-855cab64924c] Successfully updated port: 3b9cf75c-dd86-44d1-9123-1680cabc589d {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 823.076581] env[62627]: DEBUG oslo_vmware.api [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Waiting for the task: (returnval){ [ 823.076581] env[62627]: value = "task-2194216" [ 823.076581] env[62627]: _type = "Task" [ 823.076581] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.085212] env[62627]: DEBUG oslo_vmware.api [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Task: {'id': task-2194216, 'name': ReconfigVM_Task} progress is 6%. 
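Editorial note: the "Received event network-vif-plugged-..." / "No waiting events found dispatching ... Received unexpected event ..." pair above shows the compute manager matching a Neutron callback against events an in-flight operation has registered to wait for; when nothing is waiting yet (here the spawn has not registered), the event is logged as unexpected. The sketch below is a simplified model of that register/pop pattern using stdlib threading; the classes and the "<name>-<port id>" event keys mirror the log, but this is not Nova's InstanceEvents implementation.

    import threading

    class InstanceEvents:
        """Tiny model of 'register a waiter, pop it when the event arrives'."""

        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = {}   # (instance_uuid, event_key) -> threading.Event

        def prepare(self, instance_uuid, event_key):
            ev = threading.Event()
            with self._lock:
                self._waiters[(instance_uuid, event_key)] = ev
            return ev

        def pop(self, instance_uuid, event_key):
            with self._lock:
                return self._waiters.pop((instance_uuid, event_key), None)

    events = InstanceEvents()

    def external_instance_event(instance_uuid, event_key):
        waiter = events.pop(instance_uuid, event_key)
        if waiter is None:
            # Matches the WARNING in the log: nothing registered for this event yet.
            print('Received unexpected event %s for instance %s' % (event_key, instance_uuid))
        else:
            waiter.set()

    # A spawn that waits for the VIF to be plugged before continuing:
    uuid = 'cbd30c37-f6c3-4229-8b95-855cab64924c'
    key = 'network-vif-plugged-3b9cf75c-dd86-44d1-9123-1680cabc589d'
    waiter = events.prepare(uuid, key)
    external_instance_event(uuid, key)
    assert waiter.wait(timeout=1)
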
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.250626] env[62627]: DEBUG oslo_concurrency.lockutils [None req-005b4c84-2d30-4fb5-b4d3-368c1e7ef9cc tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "ea289de9-9b2d-4756-a9a5-0e3dfe8db772" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 70.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 823.337396] env[62627]: DEBUG oslo_vmware.api [None req-0f2d6fcd-c542-44b2-a5fb-1bfd349ffe43 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Task: {'id': task-2194214, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.412076] env[62627]: DEBUG nova.scheduler.client.report [None req-8a65011e-78d9-423f-b028-9afc812ebe7c tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 823.573099] env[62627]: DEBUG oslo_concurrency.lockutils [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Acquiring lock "refresh_cache-cbd30c37-f6c3-4229-8b95-855cab64924c" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 823.573099] env[62627]: DEBUG oslo_concurrency.lockutils [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Acquired lock "refresh_cache-cbd30c37-f6c3-4229-8b95-855cab64924c" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 823.573099] env[62627]: DEBUG nova.network.neutron [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] [instance: cbd30c37-f6c3-4229-8b95-855cab64924c] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 823.595278] env[62627]: DEBUG oslo_vmware.api [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Task: {'id': task-2194216, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.596729] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7ed47f54-b821-49b1-85ff-7988a25704ef tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquiring lock "ea289de9-9b2d-4756-a9a5-0e3dfe8db772" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 823.596993] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7ed47f54-b821-49b1-85ff-7988a25704ef tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "ea289de9-9b2d-4756-a9a5-0e3dfe8db772" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 823.597240] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7ed47f54-b821-49b1-85ff-7988a25704ef tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquiring lock "ea289de9-9b2d-4756-a9a5-0e3dfe8db772-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 823.597428] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7ed47f54-b821-49b1-85ff-7988a25704ef tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "ea289de9-9b2d-4756-a9a5-0e3dfe8db772-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 823.597598] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7ed47f54-b821-49b1-85ff-7988a25704ef tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "ea289de9-9b2d-4756-a9a5-0e3dfe8db772-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 823.600026] env[62627]: INFO nova.compute.manager [None req-7ed47f54-b821-49b1-85ff-7988a25704ef tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: ea289de9-9b2d-4756-a9a5-0e3dfe8db772] Terminating instance [ 823.753038] env[62627]: DEBUG nova.compute.manager [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 823.837713] env[62627]: DEBUG oslo_vmware.api [None req-0f2d6fcd-c542-44b2-a5fb-1bfd349ffe43 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Task: {'id': task-2194214, 'name': CreateSnapshot_Task, 'duration_secs': 1.385409} completed successfully. 
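Editorial note: the Acquiring/acquired/"released" lock lines throughout this section (for example "compute_resources", "refresh_cache-<uuid>", and the per-instance build and terminate locks, with "waited X" / "held Y" timings) come from oslo_concurrency's lockutils. A minimal usage sketch, assuming oslo.concurrency is installed; the lock names follow the log and the bodies are placeholders, so this shows the library's basic usage rather than Nova's actual call sites.

    from oslo_concurrency import lockutils

    # Context-manager form, as used around resource-tracker updates:
    with lockutils.lock("compute_resources"):
        pass  # claim or update resources for an instance here

    # Decorator form, serializing callers on a per-instance name:
    @lockutils.synchronized("ea289de9-9b2d-4756-a9a5-0e3dfe8db772")
    def do_terminate_instance():
        pass  # power off, unregister, delete datastore files
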
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.838153] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-0f2d6fcd-c542-44b2-a5fb-1bfd349ffe43 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Created Snapshot of the VM instance {{(pid=62627) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 823.839078] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2fa76f2-ec1f-48e7-ac1e-7a9a978553ea {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.917925] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8a65011e-78d9-423f-b028-9afc812ebe7c tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.694s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 823.922798] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.509s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 823.922798] env[62627]: INFO nova.compute.claims [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 823.945484] env[62627]: INFO nova.scheduler.client.report [None req-8a65011e-78d9-423f-b028-9afc812ebe7c tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Deleted allocations for instance 15799515-ab7d-4e7c-a6e0-c740baf4f93a [ 824.090062] env[62627]: DEBUG oslo_vmware.api [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Task: {'id': task-2194216, 'name': ReconfigVM_Task, 'duration_secs': 0.558068} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.090345] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] [instance: 95ac4fbf-071a-403e-8cae-88d9899757ba] Reconfigured VM instance instance-0000001e to attach disk [datastore2] 95ac4fbf-071a-403e-8cae-88d9899757ba/95ac4fbf-071a-403e-8cae-88d9899757ba.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 824.090966] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-00aa856f-a150-4b7d-b2b4-99b6befe5227 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.098173] env[62627]: DEBUG oslo_vmware.api [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Waiting for the task: (returnval){ [ 824.098173] env[62627]: value = "task-2194217" [ 824.098173] env[62627]: _type = "Task" [ 824.098173] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.103879] env[62627]: DEBUG nova.compute.manager [None req-7ed47f54-b821-49b1-85ff-7988a25704ef tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: ea289de9-9b2d-4756-a9a5-0e3dfe8db772] Start destroying the instance on the hypervisor. {{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 824.104258] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7ed47f54-b821-49b1-85ff-7988a25704ef tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: ea289de9-9b2d-4756-a9a5-0e3dfe8db772] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 824.108333] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47a627f6-b54b-45b1-9d44-d4516385407a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.111653] env[62627]: DEBUG oslo_vmware.api [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Task: {'id': task-2194217, 'name': Rename_Task} progress is 5%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.118702] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ed47f54-b821-49b1-85ff-7988a25704ef tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: ea289de9-9b2d-4756-a9a5-0e3dfe8db772] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 824.118841] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8434a08f-9b57-495a-9775-6ffbc91581b4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.126509] env[62627]: DEBUG oslo_vmware.api [None req-7ed47f54-b821-49b1-85ff-7988a25704ef tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 824.126509] env[62627]: value = "task-2194218" [ 824.126509] env[62627]: _type = "Task" [ 824.126509] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.132228] env[62627]: DEBUG nova.network.neutron [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] [instance: cbd30c37-f6c3-4229-8b95-855cab64924c] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 824.140561] env[62627]: DEBUG oslo_vmware.api [None req-7ed47f54-b821-49b1-85ff-7988a25704ef tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194218, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.274846] env[62627]: DEBUG oslo_concurrency.lockutils [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 824.361019] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-0f2d6fcd-c542-44b2-a5fb-1bfd349ffe43 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Creating linked-clone VM from snapshot {{(pid=62627) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 824.361019] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-fa7bcbfd-945d-4cb1-8060-c832229edde3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.368123] env[62627]: DEBUG oslo_vmware.api [None req-0f2d6fcd-c542-44b2-a5fb-1bfd349ffe43 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Waiting for the task: (returnval){ [ 824.368123] env[62627]: value = "task-2194219" [ 824.368123] env[62627]: _type = "Task" [ 824.368123] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.379783] env[62627]: DEBUG oslo_vmware.api [None req-0f2d6fcd-c542-44b2-a5fb-1bfd349ffe43 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Task: {'id': task-2194219, 'name': CloneVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.457251] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8a65011e-78d9-423f-b028-9afc812ebe7c tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Lock "15799515-ab7d-4e7c-a6e0-c740baf4f93a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.765s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 824.557243] env[62627]: DEBUG nova.network.neutron [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] [instance: cbd30c37-f6c3-4229-8b95-855cab64924c] Updating instance_info_cache with network_info: [{"id": "3b9cf75c-dd86-44d1-9123-1680cabc589d", "address": "fa:16:3e:0b:0d:26", "network": {"id": "bd876f46-2bdf-4196-b718-c36f7e15f150", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.45", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ccb0cafa1a904d578ec4998b386bebf7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae1d6a8-cbba-4bbf-af10-ba5467340475", "external-id": "nsx-vlan-transportzone-271", "segmentation_id": 271, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3b9cf75c-dd", "ovs_interfaceid": "3b9cf75c-dd86-44d1-9123-1680cabc589d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 824.609508] env[62627]: DEBUG oslo_vmware.api [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Task: {'id': task-2194217, 'name': Rename_Task, 'duration_secs': 0.182312} completed successfully. 
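Editorial note: the instance_info_cache update above carries the full network_info for port 3b9cf75c-...: a list of VIF dicts, each with an id, MAC address, devname, and a nested network/subnets/ips tree. The short sketch below pulls the commonly needed fields out of such a structure; the dict literal is abbreviated from the entry above and only keeps the keys the loop actually reads.

    network_info = [{
        "id": "3b9cf75c-dd86-44d1-9123-1680cabc589d",
        "address": "fa:16:3e:0b:0d:26",
        "type": "ovs",
        "devname": "tap3b9cf75c-dd",
        "network": {
            "label": "shared",
            "subnets": [{
                "cidr": "192.168.233.0/24",
                "gateway": {"address": "192.168.233.1"},
                "ips": [{"address": "192.168.233.45", "type": "fixed"}],
            }],
        },
    }]

    for vif in network_info:
        fixed_ips = [ip["address"]
                     for subnet in vif["network"]["subnets"]
                     for ip in subnet["ips"]
                     if ip.get("type") == "fixed"]
        print(vif["id"], vif["address"], vif["devname"], fixed_ips)
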
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.609780] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] [instance: 95ac4fbf-071a-403e-8cae-88d9899757ba] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 824.610036] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fabaaf98-1e2a-4d60-b272-008fc3bf526c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.618031] env[62627]: DEBUG oslo_vmware.api [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Waiting for the task: (returnval){ [ 824.618031] env[62627]: value = "task-2194220" [ 824.618031] env[62627]: _type = "Task" [ 824.618031] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.627789] env[62627]: DEBUG oslo_vmware.api [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Task: {'id': task-2194220, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.637875] env[62627]: DEBUG oslo_vmware.api [None req-7ed47f54-b821-49b1-85ff-7988a25704ef tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194218, 'name': PowerOffVM_Task, 'duration_secs': 0.216364} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.638210] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ed47f54-b821-49b1-85ff-7988a25704ef tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: ea289de9-9b2d-4756-a9a5-0e3dfe8db772] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 824.639054] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7ed47f54-b821-49b1-85ff-7988a25704ef tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: ea289de9-9b2d-4756-a9a5-0e3dfe8db772] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 824.639392] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-93dc3442-2845-4f61-8c3e-7f17f9f7f605 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.700559] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7ed47f54-b821-49b1-85ff-7988a25704ef tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: ea289de9-9b2d-4756-a9a5-0e3dfe8db772] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 824.700842] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7ed47f54-b821-49b1-85ff-7988a25704ef tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: ea289de9-9b2d-4756-a9a5-0e3dfe8db772] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 824.701059] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ed47f54-b821-49b1-85ff-7988a25704ef tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Deleting the datastore file [datastore2] ea289de9-9b2d-4756-a9a5-0e3dfe8db772 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 824.701328] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c295d6f9-382e-4a38-aadb-cbc77743a65e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.708943] env[62627]: DEBUG oslo_vmware.api [None req-7ed47f54-b821-49b1-85ff-7988a25704ef tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 824.708943] env[62627]: value = "task-2194222" [ 824.708943] env[62627]: _type = "Task" [ 824.708943] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.720298] env[62627]: DEBUG oslo_vmware.api [None req-7ed47f54-b821-49b1-85ff-7988a25704ef tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194222, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.878676] env[62627]: DEBUG oslo_vmware.api [None req-0f2d6fcd-c542-44b2-a5fb-1bfd349ffe43 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Task: {'id': task-2194219, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.992829] env[62627]: DEBUG nova.compute.manager [req-016deeb2-c566-4ab7-b0b8-d4cb48bedb57 req-c4d93c9e-b450-4677-9540-b38a550690d8 service nova] [instance: cbd30c37-f6c3-4229-8b95-855cab64924c] Received event network-changed-3b9cf75c-dd86-44d1-9123-1680cabc589d {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 824.992893] env[62627]: DEBUG nova.compute.manager [req-016deeb2-c566-4ab7-b0b8-d4cb48bedb57 req-c4d93c9e-b450-4677-9540-b38a550690d8 service nova] [instance: cbd30c37-f6c3-4229-8b95-855cab64924c] Refreshing instance network info cache due to event network-changed-3b9cf75c-dd86-44d1-9123-1680cabc589d. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 824.993556] env[62627]: DEBUG oslo_concurrency.lockutils [req-016deeb2-c566-4ab7-b0b8-d4cb48bedb57 req-c4d93c9e-b450-4677-9540-b38a550690d8 service nova] Acquiring lock "refresh_cache-cbd30c37-f6c3-4229-8b95-855cab64924c" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 825.062222] env[62627]: DEBUG oslo_concurrency.lockutils [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Releasing lock "refresh_cache-cbd30c37-f6c3-4229-8b95-855cab64924c" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 825.062222] env[62627]: DEBUG nova.compute.manager [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] [instance: cbd30c37-f6c3-4229-8b95-855cab64924c] Instance network_info: |[{"id": "3b9cf75c-dd86-44d1-9123-1680cabc589d", "address": "fa:16:3e:0b:0d:26", "network": {"id": "bd876f46-2bdf-4196-b718-c36f7e15f150", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.45", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ccb0cafa1a904d578ec4998b386bebf7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae1d6a8-cbba-4bbf-af10-ba5467340475", "external-id": "nsx-vlan-transportzone-271", "segmentation_id": 271, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3b9cf75c-dd", "ovs_interfaceid": "3b9cf75c-dd86-44d1-9123-1680cabc589d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 825.062362] env[62627]: DEBUG oslo_concurrency.lockutils [req-016deeb2-c566-4ab7-b0b8-d4cb48bedb57 req-c4d93c9e-b450-4677-9540-b38a550690d8 service nova] Acquired lock "refresh_cache-cbd30c37-f6c3-4229-8b95-855cab64924c" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 825.062362] env[62627]: DEBUG nova.network.neutron [req-016deeb2-c566-4ab7-b0b8-d4cb48bedb57 req-c4d93c9e-b450-4677-9540-b38a550690d8 service nova] 
[instance: cbd30c37-f6c3-4229-8b95-855cab64924c] Refreshing network info cache for port 3b9cf75c-dd86-44d1-9123-1680cabc589d {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 825.063042] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] [instance: cbd30c37-f6c3-4229-8b95-855cab64924c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0b:0d:26', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cae1d6a8-cbba-4bbf-af10-ba5467340475', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3b9cf75c-dd86-44d1-9123-1680cabc589d', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 825.071491] env[62627]: DEBUG oslo.service.loopingcall [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 825.072429] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cbd30c37-f6c3-4229-8b95-855cab64924c] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 825.073555] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b3e6c9a3-66a9-420f-963d-aa2f925dfab2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.096721] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 825.096721] env[62627]: value = "task-2194223" [ 825.096721] env[62627]: _type = "Task" [ 825.096721] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.109349] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194223, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.133633] env[62627]: DEBUG oslo_vmware.api [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Task: {'id': task-2194220, 'name': PowerOnVM_Task, 'duration_secs': 0.509216} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.133870] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] [instance: 95ac4fbf-071a-403e-8cae-88d9899757ba] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 825.134076] env[62627]: INFO nova.compute.manager [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] [instance: 95ac4fbf-071a-403e-8cae-88d9899757ba] Took 7.74 seconds to spawn the instance on the hypervisor. 
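Editorial note: between roughly 821.9 and 825.1 the log records a complete VMware spawn for instance 95ac4fbf-...: locate the cached image VMDK (SearchDatastore_Task), copy it into the instance directory (CopyVirtualDisk_Task), extend the root disk (ExtendVirtualDisk_Task), attach it to the VM (ReconfigVM_Task), rename the VM (Rename_Task), and power it on (PowerOnVM_Task). The compact sketch below only enumerates that step order; run_task is a stub standing in for invoking and waiting on a vCenter task, not a real vSphere call.

    def run_task(name):
        """Stand-in for invoking a vCenter task and waiting for completion."""
        print("%s completed successfully" % name)

    def spawn_from_cached_image(image_id, instance_uuid, root_gb):
        cached = "[datastore2] devstack-image-cache_base/%s/%s.vmdk" % (image_id, image_id)
        target = "[datastore2] %s/%s.vmdk" % (instance_uuid, instance_uuid)

        run_task("SearchDatastore_Task")    # confirm the cached image VMDK exists
        run_task("CopyVirtualDisk_Task")    # copy cached -> target
        run_task("ExtendVirtualDisk_Task")  # grow target to root_gb
        run_task("ReconfigVM_Task")         # attach target as the root disk
        run_task("Rename_Task")             # rename the VM to its final name
        run_task("PowerOnVM_Task")          # boot the instance
        return target

    spawn_from_cached_image("d5497cf1-16e8-478a-a834-e9289b1751b7",
                            "95ac4fbf-071a-403e-8cae-88d9899757ba", root_gb=1)
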
[ 825.134252] env[62627]: DEBUG nova.compute.manager [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] [instance: 95ac4fbf-071a-403e-8cae-88d9899757ba] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 825.137778] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08013521-6572-4a6f-8a42-09c71e0a5dc3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.220479] env[62627]: DEBUG oslo_vmware.api [None req-7ed47f54-b821-49b1-85ff-7988a25704ef tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194222, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.348926} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.220735] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ed47f54-b821-49b1-85ff-7988a25704ef tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 825.220922] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7ed47f54-b821-49b1-85ff-7988a25704ef tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: ea289de9-9b2d-4756-a9a5-0e3dfe8db772] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 825.221112] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7ed47f54-b821-49b1-85ff-7988a25704ef tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: ea289de9-9b2d-4756-a9a5-0e3dfe8db772] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 825.221286] env[62627]: INFO nova.compute.manager [None req-7ed47f54-b821-49b1-85ff-7988a25704ef tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: ea289de9-9b2d-4756-a9a5-0e3dfe8db772] Took 1.12 seconds to destroy the instance on the hypervisor. [ 825.221614] env[62627]: DEBUG oslo.service.loopingcall [None req-7ed47f54-b821-49b1-85ff-7988a25704ef tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 825.221817] env[62627]: DEBUG nova.compute.manager [-] [instance: ea289de9-9b2d-4756-a9a5-0e3dfe8db772] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 825.221911] env[62627]: DEBUG nova.network.neutron [-] [instance: ea289de9-9b2d-4756-a9a5-0e3dfe8db772] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 825.379829] env[62627]: DEBUG oslo_vmware.api [None req-0f2d6fcd-c542-44b2-a5fb-1bfd349ffe43 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Task: {'id': task-2194219, 'name': CloneVM_Task} progress is 95%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.506575] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03a0b52c-e673-47f5-88cf-709d684b013a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.514433] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fee49a35-833e-4700-b7e7-72d1ade46ec6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.545465] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-698cae42-6d5f-4b61-ad7f-269a371f47a7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.553188] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0287f0ed-b715-4d97-98b8-06d3c639903a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.566959] env[62627]: DEBUG nova.compute.provider_tree [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 825.609331] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194223, 'name': CreateVM_Task, 'duration_secs': 0.371454} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.613012] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cbd30c37-f6c3-4229-8b95-855cab64924c] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 825.613012] env[62627]: DEBUG oslo_concurrency.lockutils [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 825.613012] env[62627]: DEBUG oslo_concurrency.lockutils [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 825.613012] env[62627]: DEBUG oslo_concurrency.lockutils [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 825.613426] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-757b9d9c-137e-48d5-8768-730481c2e0d3 {{(pid=62627) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.618083] env[62627]: DEBUG oslo_vmware.api [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Waiting for the task: (returnval){ [ 825.618083] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]525d04e1-fd0e-ce5e-e288-8ccc07a9f9fd" [ 825.618083] env[62627]: _type = "Task" [ 825.618083] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.627395] env[62627]: DEBUG oslo_vmware.api [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]525d04e1-fd0e-ce5e-e288-8ccc07a9f9fd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.660180] env[62627]: INFO nova.compute.manager [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] [instance: 95ac4fbf-071a-403e-8cae-88d9899757ba] Took 37.07 seconds to build instance. [ 825.828235] env[62627]: DEBUG nova.network.neutron [req-016deeb2-c566-4ab7-b0b8-d4cb48bedb57 req-c4d93c9e-b450-4677-9540-b38a550690d8 service nova] [instance: cbd30c37-f6c3-4229-8b95-855cab64924c] Updated VIF entry in instance network info cache for port 3b9cf75c-dd86-44d1-9123-1680cabc589d. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 825.829069] env[62627]: DEBUG nova.network.neutron [req-016deeb2-c566-4ab7-b0b8-d4cb48bedb57 req-c4d93c9e-b450-4677-9540-b38a550690d8 service nova] [instance: cbd30c37-f6c3-4229-8b95-855cab64924c] Updating instance_info_cache with network_info: [{"id": "3b9cf75c-dd86-44d1-9123-1680cabc589d", "address": "fa:16:3e:0b:0d:26", "network": {"id": "bd876f46-2bdf-4196-b718-c36f7e15f150", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.45", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ccb0cafa1a904d578ec4998b386bebf7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae1d6a8-cbba-4bbf-af10-ba5467340475", "external-id": "nsx-vlan-transportzone-271", "segmentation_id": 271, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3b9cf75c-dd", "ovs_interfaceid": "3b9cf75c-dd86-44d1-9123-1680cabc589d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 825.879466] env[62627]: DEBUG oslo_vmware.api [None req-0f2d6fcd-c542-44b2-a5fb-1bfd349ffe43 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Task: {'id': task-2194219, 'name': CloneVM_Task, 'duration_secs': 1.211065} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.880358] env[62627]: INFO nova.virt.vmwareapi.vmops [None req-0f2d6fcd-c542-44b2-a5fb-1bfd349ffe43 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Created linked-clone VM from snapshot [ 825.881204] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e158510e-acb6-4792-a047-740dccf1b9a4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.889290] env[62627]: DEBUG nova.virt.vmwareapi.images [None req-0f2d6fcd-c542-44b2-a5fb-1bfd349ffe43 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Uploading image 41a9293e-661f-4792-a6cc-b964646b15b8 {{(pid=62627) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 825.916269] env[62627]: DEBUG oslo_vmware.rw_handles [None req-0f2d6fcd-c542-44b2-a5fb-1bfd349ffe43 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 825.916269] env[62627]: value = "vm-447627" [ 825.916269] env[62627]: _type = "VirtualMachine" [ 825.916269] env[62627]: }. {{(pid=62627) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 825.916546] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-d6eb5906-58a1-49b9-b7d0-ea89725bf610 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.924131] env[62627]: DEBUG oslo_vmware.rw_handles [None req-0f2d6fcd-c542-44b2-a5fb-1bfd349ffe43 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Lease: (returnval){ [ 825.924131] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]529daa35-f351-0b6c-c362-0b3e8503acad" [ 825.924131] env[62627]: _type = "HttpNfcLease" [ 825.924131] env[62627]: } obtained for exporting VM: (result){ [ 825.924131] env[62627]: value = "vm-447627" [ 825.924131] env[62627]: _type = "VirtualMachine" [ 825.924131] env[62627]: }. {{(pid=62627) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 825.924371] env[62627]: DEBUG oslo_vmware.api [None req-0f2d6fcd-c542-44b2-a5fb-1bfd349ffe43 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Waiting for the lease: (returnval){ [ 825.924371] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]529daa35-f351-0b6c-c362-0b3e8503acad" [ 825.924371] env[62627]: _type = "HttpNfcLease" [ 825.924371] env[62627]: } to be ready. {{(pid=62627) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 825.931447] env[62627]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 825.931447] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]529daa35-f351-0b6c-c362-0b3e8503acad" [ 825.931447] env[62627]: _type = "HttpNfcLease" [ 825.931447] env[62627]: } is initializing. 
{{(pid=62627) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 826.070970] env[62627]: DEBUG nova.scheduler.client.report [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 826.075186] env[62627]: DEBUG nova.network.neutron [-] [instance: ea289de9-9b2d-4756-a9a5-0e3dfe8db772] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 826.129940] env[62627]: DEBUG oslo_vmware.api [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]525d04e1-fd0e-ce5e-e288-8ccc07a9f9fd, 'name': SearchDatastore_Task, 'duration_secs': 0.013246} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.130566] env[62627]: DEBUG oslo_concurrency.lockutils [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 826.131071] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] [instance: cbd30c37-f6c3-4229-8b95-855cab64924c] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 826.131642] env[62627]: DEBUG oslo_concurrency.lockutils [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 826.131916] env[62627]: DEBUG oslo_concurrency.lockutils [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 826.132208] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Creating 
directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 826.132828] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-69298437-81a8-47a7-9128-3fa47788066a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.146653] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 826.146653] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 826.147372] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-018cdf3a-48f5-4a23-892c-db0cc16d70b9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.152627] env[62627]: DEBUG oslo_vmware.api [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Waiting for the task: (returnval){ [ 826.152627] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]523b8e50-30f3-d5e4-f31f-173aeb8e6096" [ 826.152627] env[62627]: _type = "Task" [ 826.152627] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.161522] env[62627]: DEBUG oslo_vmware.api [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]523b8e50-30f3-d5e4-f31f-173aeb8e6096, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.163079] env[62627]: DEBUG oslo_concurrency.lockutils [None req-67de70ee-4c60-4bcd-83d0-fc3442cb61fc tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Lock "95ac4fbf-071a-403e-8cae-88d9899757ba" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 70.805s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 826.188974] env[62627]: DEBUG oslo_concurrency.lockutils [None req-283edade-b2c2-46cb-aeec-7eeafacf7be6 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Acquiring lock "f09c685d-9103-4c94-a44a-266aacc334f3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 826.188974] env[62627]: DEBUG oslo_concurrency.lockutils [None req-283edade-b2c2-46cb-aeec-7eeafacf7be6 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Lock "f09c685d-9103-4c94-a44a-266aacc334f3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 826.188974] env[62627]: DEBUG oslo_concurrency.lockutils [None req-283edade-b2c2-46cb-aeec-7eeafacf7be6 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Acquiring lock "f09c685d-9103-4c94-a44a-266aacc334f3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 826.189211] env[62627]: DEBUG oslo_concurrency.lockutils [None req-283edade-b2c2-46cb-aeec-7eeafacf7be6 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Lock "f09c685d-9103-4c94-a44a-266aacc334f3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 826.189211] env[62627]: DEBUG oslo_concurrency.lockutils [None req-283edade-b2c2-46cb-aeec-7eeafacf7be6 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Lock "f09c685d-9103-4c94-a44a-266aacc334f3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 826.191238] env[62627]: INFO nova.compute.manager [None req-283edade-b2c2-46cb-aeec-7eeafacf7be6 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: f09c685d-9103-4c94-a44a-266aacc334f3] Terminating instance [ 826.330966] env[62627]: DEBUG oslo_concurrency.lockutils [req-016deeb2-c566-4ab7-b0b8-d4cb48bedb57 req-c4d93c9e-b450-4677-9540-b38a550690d8 service nova] Releasing lock "refresh_cache-cbd30c37-f6c3-4229-8b95-855cab64924c" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} 
[ 826.368233] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cc6eb1f9-32f9-485d-9db7-a375a64b1299 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Acquiring lock "76fb58dc-df20-464b-bda0-dcf09d69c718" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 826.368485] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cc6eb1f9-32f9-485d-9db7-a375a64b1299 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Lock "76fb58dc-df20-464b-bda0-dcf09d69c718" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 826.368687] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cc6eb1f9-32f9-485d-9db7-a375a64b1299 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Acquiring lock "76fb58dc-df20-464b-bda0-dcf09d69c718-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 826.368864] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cc6eb1f9-32f9-485d-9db7-a375a64b1299 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Lock "76fb58dc-df20-464b-bda0-dcf09d69c718-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 826.369043] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cc6eb1f9-32f9-485d-9db7-a375a64b1299 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Lock "76fb58dc-df20-464b-bda0-dcf09d69c718-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 826.371191] env[62627]: INFO nova.compute.manager [None req-cc6eb1f9-32f9-485d-9db7-a375a64b1299 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 76fb58dc-df20-464b-bda0-dcf09d69c718] Terminating instance [ 826.433217] env[62627]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 826.433217] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]529daa35-f351-0b6c-c362-0b3e8503acad" [ 826.433217] env[62627]: _type = "HttpNfcLease" [ 826.433217] env[62627]: } is ready. {{(pid=62627) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 826.433515] env[62627]: DEBUG oslo_vmware.rw_handles [None req-0f2d6fcd-c542-44b2-a5fb-1bfd349ffe43 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 826.433515] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]529daa35-f351-0b6c-c362-0b3e8503acad" [ 826.433515] env[62627]: _type = "HttpNfcLease" [ 826.433515] env[62627]: }. 
{{(pid=62627) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 826.434320] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a63d505-2dff-42a3-9e4d-4fca5abeab21 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.442151] env[62627]: DEBUG oslo_vmware.rw_handles [None req-0f2d6fcd-c542-44b2-a5fb-1bfd349ffe43 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b2f948-5220-1b56-3f16-0d70c6f410ec/disk-0.vmdk from lease info. {{(pid=62627) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 826.442321] env[62627]: DEBUG oslo_vmware.rw_handles [None req-0f2d6fcd-c542-44b2-a5fb-1bfd349ffe43 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b2f948-5220-1b56-3f16-0d70c6f410ec/disk-0.vmdk for reading. {{(pid=62627) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 826.578157] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.658s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 826.578654] env[62627]: DEBUG nova.compute.manager [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 826.581645] env[62627]: INFO nova.compute.manager [-] [instance: ea289de9-9b2d-4756-a9a5-0e3dfe8db772] Took 1.36 seconds to deallocate network for instance. 
[ 826.582146] env[62627]: DEBUG oslo_concurrency.lockutils [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.882s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 826.583551] env[62627]: INFO nova.compute.claims [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: aa181094-cb78-44e0-8d7a-1d677e0af1fe] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 826.638447] env[62627]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-71729b1b-550a-4da9-83df-f1b4bc310b24 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.665904] env[62627]: DEBUG oslo_vmware.api [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]523b8e50-30f3-d5e4-f31f-173aeb8e6096, 'name': SearchDatastore_Task, 'duration_secs': 0.032321} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.671381] env[62627]: DEBUG nova.compute.manager [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8] Starting instance... 
{{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 826.675188] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c0cab000-dd48-4759-afeb-f9d3110e1396 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.679832] env[62627]: DEBUG oslo_concurrency.lockutils [None req-dfd24ab3-a96a-4c40-90d5-84ca010785ea tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Acquiring lock "interface-95ac4fbf-071a-403e-8cae-88d9899757ba-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 826.680092] env[62627]: DEBUG oslo_concurrency.lockutils [None req-dfd24ab3-a96a-4c40-90d5-84ca010785ea tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Lock "interface-95ac4fbf-071a-403e-8cae-88d9899757ba-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 826.680397] env[62627]: DEBUG nova.objects.instance [None req-dfd24ab3-a96a-4c40-90d5-84ca010785ea tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Lazy-loading 'flavor' on Instance uuid 95ac4fbf-071a-403e-8cae-88d9899757ba {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 826.682834] env[62627]: DEBUG oslo_vmware.api [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Waiting for the task: (returnval){ [ 826.682834] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5217ca1e-8bf3-6201-6ce5-6f2f160e2f22" [ 826.682834] env[62627]: _type = "Task" [ 826.682834] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.691901] env[62627]: DEBUG oslo_vmware.api [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5217ca1e-8bf3-6201-6ce5-6f2f160e2f22, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.697015] env[62627]: DEBUG nova.compute.manager [None req-283edade-b2c2-46cb-aeec-7eeafacf7be6 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: f09c685d-9103-4c94-a44a-266aacc334f3] Start destroying the instance on the hypervisor. 
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 826.697015] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-283edade-b2c2-46cb-aeec-7eeafacf7be6 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: f09c685d-9103-4c94-a44a-266aacc334f3] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 826.697015] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a8e14a0-d03c-4d90-96c4-432f8a5500e1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.705336] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-283edade-b2c2-46cb-aeec-7eeafacf7be6 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: f09c685d-9103-4c94-a44a-266aacc334f3] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 826.706635] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a6a792f6-ce58-4d0d-91bd-a5c06bc1432d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.713501] env[62627]: DEBUG oslo_vmware.api [None req-283edade-b2c2-46cb-aeec-7eeafacf7be6 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Waiting for the task: (returnval){ [ 826.713501] env[62627]: value = "task-2194225" [ 826.713501] env[62627]: _type = "Task" [ 826.713501] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.722668] env[62627]: DEBUG oslo_vmware.api [None req-283edade-b2c2-46cb-aeec-7eeafacf7be6 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': task-2194225, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.874722] env[62627]: DEBUG nova.compute.manager [None req-cc6eb1f9-32f9-485d-9db7-a375a64b1299 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 76fb58dc-df20-464b-bda0-dcf09d69c718] Start destroying the instance on the hypervisor. 
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 826.874973] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-cc6eb1f9-32f9-485d-9db7-a375a64b1299 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 76fb58dc-df20-464b-bda0-dcf09d69c718] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 826.875884] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaa2284b-fd3d-4557-81c3-832139b1b9ff {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.886061] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc6eb1f9-32f9-485d-9db7-a375a64b1299 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 76fb58dc-df20-464b-bda0-dcf09d69c718] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 826.886347] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-66939a05-1aca-44cf-b5c2-e1b00a6ab768 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.893407] env[62627]: DEBUG oslo_vmware.api [None req-cc6eb1f9-32f9-485d-9db7-a375a64b1299 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Waiting for the task: (returnval){ [ 826.893407] env[62627]: value = "task-2194226" [ 826.893407] env[62627]: _type = "Task" [ 826.893407] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.901399] env[62627]: DEBUG oslo_vmware.api [None req-cc6eb1f9-32f9-485d-9db7-a375a64b1299 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': task-2194226, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.083840] env[62627]: DEBUG nova.compute.utils [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 827.085343] env[62627]: DEBUG nova.compute.manager [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Allocating IP information in the background. 
{{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 827.087261] env[62627]: DEBUG nova.network.neutron [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 827.093039] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7ed47f54-b821-49b1-85ff-7988a25704ef tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 827.144846] env[62627]: DEBUG nova.policy [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3a895c08a57d4396ac7fa5f4ad8642a7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2d41b3fa69e2473fbea79b0ec52108fa', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 827.186151] env[62627]: DEBUG nova.objects.instance [None req-dfd24ab3-a96a-4c40-90d5-84ca010785ea tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Lazy-loading 'pci_requests' on Instance uuid 95ac4fbf-071a-403e-8cae-88d9899757ba {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 827.200674] env[62627]: DEBUG oslo_vmware.api [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5217ca1e-8bf3-6201-6ce5-6f2f160e2f22, 'name': SearchDatastore_Task, 'duration_secs': 0.02052} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.201699] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 827.203037] env[62627]: DEBUG oslo_concurrency.lockutils [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 827.203361] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] cbd30c37-f6c3-4229-8b95-855cab64924c/cbd30c37-f6c3-4229-8b95-855cab64924c.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 827.205127] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-11eb1e94-d1a2-4f66-baba-62308caac14e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.212936] env[62627]: DEBUG oslo_vmware.api [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Waiting for the task: (returnval){ [ 827.212936] env[62627]: value = "task-2194227" [ 827.212936] env[62627]: _type = "Task" [ 827.212936] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.229743] env[62627]: DEBUG oslo_vmware.api [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Task: {'id': task-2194227, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.233154] env[62627]: DEBUG oslo_vmware.api [None req-283edade-b2c2-46cb-aeec-7eeafacf7be6 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': task-2194225, 'name': PowerOffVM_Task, 'duration_secs': 0.161674} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.233514] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-283edade-b2c2-46cb-aeec-7eeafacf7be6 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: f09c685d-9103-4c94-a44a-266aacc334f3] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 827.233810] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-283edade-b2c2-46cb-aeec-7eeafacf7be6 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: f09c685d-9103-4c94-a44a-266aacc334f3] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 827.234831] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-749b457b-5f92-47db-8fd7-88856291f70a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.396184] env[62627]: DEBUG nova.compute.manager [req-00b27c03-072f-4677-bb88-07e98c1678d6 req-9dac877b-d9e0-40cf-ba15-4fe6b59af5a7 service nova] [instance: ea289de9-9b2d-4756-a9a5-0e3dfe8db772] Received event network-vif-deleted-fc1db27a-489e-4bc7-a90d-1af91c74e4fd {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 827.408504] env[62627]: DEBUG oslo_vmware.api [None req-cc6eb1f9-32f9-485d-9db7-a375a64b1299 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': task-2194226, 'name': PowerOffVM_Task, 'duration_secs': 0.170428} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.408875] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc6eb1f9-32f9-485d-9db7-a375a64b1299 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 76fb58dc-df20-464b-bda0-dcf09d69c718] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 827.409120] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-cc6eb1f9-32f9-485d-9db7-a375a64b1299 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 76fb58dc-df20-464b-bda0-dcf09d69c718] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 827.409434] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-822cdaaa-77c9-444f-9789-e6d0d6ea4cf4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.505186] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-cc6eb1f9-32f9-485d-9db7-a375a64b1299 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 76fb58dc-df20-464b-bda0-dcf09d69c718] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 827.505501] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-cc6eb1f9-32f9-485d-9db7-a375a64b1299 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 76fb58dc-df20-464b-bda0-dcf09d69c718] Deleting contents of the VM from datastore datastore2 
{{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 827.505748] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc6eb1f9-32f9-485d-9db7-a375a64b1299 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Deleting the datastore file [datastore2] 76fb58dc-df20-464b-bda0-dcf09d69c718 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 827.506078] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-488f3188-748f-4462-99f7-0804217db8b2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.517517] env[62627]: DEBUG oslo_vmware.api [None req-cc6eb1f9-32f9-485d-9db7-a375a64b1299 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Waiting for the task: (returnval){ [ 827.517517] env[62627]: value = "task-2194230" [ 827.517517] env[62627]: _type = "Task" [ 827.517517] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.531263] env[62627]: DEBUG oslo_vmware.api [None req-cc6eb1f9-32f9-485d-9db7-a375a64b1299 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': task-2194230, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.589063] env[62627]: DEBUG nova.compute.manager [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 827.597331] env[62627]: DEBUG nova.network.neutron [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Successfully created port: ddd3e21b-1460-4da2-bfcc-258303d2719e {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 827.692342] env[62627]: DEBUG nova.objects.base [None req-dfd24ab3-a96a-4c40-90d5-84ca010785ea tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Object Instance<95ac4fbf-071a-403e-8cae-88d9899757ba> lazy-loaded attributes: flavor,pci_requests {{(pid=62627) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 827.692597] env[62627]: DEBUG nova.network.neutron [None req-dfd24ab3-a96a-4c40-90d5-84ca010785ea tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] [instance: 95ac4fbf-071a-403e-8cae-88d9899757ba] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 827.730357] env[62627]: DEBUG oslo_vmware.api [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Task: {'id': task-2194227, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.870023] env[62627]: DEBUG oslo_concurrency.lockutils [None req-dfd24ab3-a96a-4c40-90d5-84ca010785ea tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Lock "interface-95ac4fbf-071a-403e-8cae-88d9899757ba-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.190s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 828.028963] env[62627]: DEBUG oslo_vmware.api [None req-cc6eb1f9-32f9-485d-9db7-a375a64b1299 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': task-2194230, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.479003} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.029263] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc6eb1f9-32f9-485d-9db7-a375a64b1299 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 828.029413] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-cc6eb1f9-32f9-485d-9db7-a375a64b1299 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 76fb58dc-df20-464b-bda0-dcf09d69c718] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 828.029588] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-cc6eb1f9-32f9-485d-9db7-a375a64b1299 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 76fb58dc-df20-464b-bda0-dcf09d69c718] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 828.029762] env[62627]: INFO nova.compute.manager [None req-cc6eb1f9-32f9-485d-9db7-a375a64b1299 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: 76fb58dc-df20-464b-bda0-dcf09d69c718] Took 1.15 seconds to destroy the instance on the hypervisor. [ 828.030159] env[62627]: DEBUG oslo.service.loopingcall [None req-cc6eb1f9-32f9-485d-9db7-a375a64b1299 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 828.030374] env[62627]: DEBUG nova.compute.manager [-] [instance: 76fb58dc-df20-464b-bda0-dcf09d69c718] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 828.030476] env[62627]: DEBUG nova.network.neutron [-] [instance: 76fb58dc-df20-464b-bda0-dcf09d69c718] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 828.231699] env[62627]: DEBUG oslo_vmware.api [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Task: {'id': task-2194227, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.573331} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.231957] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] cbd30c37-f6c3-4229-8b95-855cab64924c/cbd30c37-f6c3-4229-8b95-855cab64924c.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 828.232131] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] [instance: cbd30c37-f6c3-4229-8b95-855cab64924c] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 828.233155] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e8c09681-2d30-4aa2-b788-c6df209fa27f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.235826] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae2cc537-0a21-4bb7-a2b7-6ef7502a2a7e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.244255] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-734b44fc-f7d3-4ddd-a42a-d1b1fcd3ccfd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.248742] env[62627]: DEBUG oslo_vmware.api [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Waiting for the task: (returnval){ [ 828.248742] env[62627]: value = "task-2194231" [ 828.248742] env[62627]: _type = "Task" [ 828.248742] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.279506] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fe2a5b9-1fa4-45a2-bbb6-535067a07638 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.285586] env[62627]: DEBUG oslo_vmware.api [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Task: {'id': task-2194231, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.290897] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dcc4ba8-1e57-4c9c-9b7d-d7c05e7e5b3a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.304659] env[62627]: DEBUG nova.compute.provider_tree [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 828.603971] env[62627]: DEBUG nova.compute.manager [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Start spawning the instance on the hypervisor. {{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 828.630208] env[62627]: DEBUG nova.virt.hardware [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 828.630470] env[62627]: DEBUG nova.virt.hardware [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 828.630675] env[62627]: DEBUG nova.virt.hardware [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 
828.630934] env[62627]: DEBUG nova.virt.hardware [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 828.631109] env[62627]: DEBUG nova.virt.hardware [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 828.631282] env[62627]: DEBUG nova.virt.hardware [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 828.631542] env[62627]: DEBUG nova.virt.hardware [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 828.631754] env[62627]: DEBUG nova.virt.hardware [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 828.632010] env[62627]: DEBUG nova.virt.hardware [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 828.632201] env[62627]: DEBUG nova.virt.hardware [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 828.632378] env[62627]: DEBUG nova.virt.hardware [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 828.633402] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5ae85e6-c2e5-49ec-b56b-e969218fa54e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.641617] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-711e1a09-bb15-40d3-8743-a850e315df08 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.758881] env[62627]: DEBUG oslo_vmware.api [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 
tempest-DeleteServersAdminTestJSON-1615940801-project-member] Task: {'id': task-2194231, 'name': ExtendVirtualDisk_Task} progress is 50%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.810199] env[62627]: DEBUG nova.scheduler.client.report [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 828.856144] env[62627]: DEBUG nova.network.neutron [-] [instance: 76fb58dc-df20-464b-bda0-dcf09d69c718] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 829.263828] env[62627]: DEBUG oslo_vmware.api [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Task: {'id': task-2194231, 'name': ExtendVirtualDisk_Task} progress is 50%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.321023] env[62627]: DEBUG oslo_concurrency.lockutils [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.736s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 829.321023] env[62627]: DEBUG nova.compute.manager [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: aa181094-cb78-44e0-8d7a-1d677e0af1fe] Start building networks asynchronously for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 829.321859] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 29.270s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 829.322214] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 829.322488] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62627) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 829.322885] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.096s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 829.324447] env[62627]: INFO nova.compute.claims [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 829.327966] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9814e66b-dbc1-4f84-9629-239d5c10d80e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.337148] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6502c85e-7dc5-4f23-8f65-d230b06fcdae {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.358089] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78f06bfb-b6c3-4677-91c2-859f0768d18b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.361352] env[62627]: INFO nova.compute.manager [-] [instance: 76fb58dc-df20-464b-bda0-dcf09d69c718] Took 1.33 seconds to deallocate network for instance. 
[ 829.370890] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3795726-79c7-4c52-9baa-02d65d7d50f0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.411332] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179102MB free_disk=176GB free_vcpus=48 pci_devices=None {{(pid=62627) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 829.411512] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 829.511393] env[62627]: DEBUG nova.compute.manager [req-7d579174-dcda-4963-888b-81089a475d83 req-d76a3839-395c-4102-b8a0-f9b8ee9a1c0f service nova] [instance: 76fb58dc-df20-464b-bda0-dcf09d69c718] Received event network-vif-deleted-ffc6e4e3-3dcd-43a4-8fa4-dc9c8ffa3e2b {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 829.540894] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-283edade-b2c2-46cb-aeec-7eeafacf7be6 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: f09c685d-9103-4c94-a44a-266aacc334f3] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 829.541534] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-283edade-b2c2-46cb-aeec-7eeafacf7be6 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: f09c685d-9103-4c94-a44a-266aacc334f3] Deleting contents of the VM from datastore datastore1 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 829.541904] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-283edade-b2c2-46cb-aeec-7eeafacf7be6 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Deleting the datastore file [datastore1] f09c685d-9103-4c94-a44a-266aacc334f3 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 829.542207] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bc99dfd9-97f2-459b-8703-98ebc641b5be {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.549633] env[62627]: DEBUG oslo_vmware.api [None req-283edade-b2c2-46cb-aeec-7eeafacf7be6 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Waiting for the task: (returnval){ [ 829.549633] env[62627]: value = "task-2194232" [ 829.549633] env[62627]: _type = "Task" [ 829.549633] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.557648] env[62627]: DEBUG oslo_vmware.api [None req-283edade-b2c2-46cb-aeec-7eeafacf7be6 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': task-2194232, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.569308] env[62627]: DEBUG nova.network.neutron [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Successfully updated port: ddd3e21b-1460-4da2-bfcc-258303d2719e {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 829.762096] env[62627]: DEBUG oslo_vmware.api [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Task: {'id': task-2194231, 'name': ExtendVirtualDisk_Task} progress is 50%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.829342] env[62627]: DEBUG nova.compute.utils [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 829.833962] env[62627]: DEBUG nova.compute.manager [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: aa181094-cb78-44e0-8d7a-1d677e0af1fe] Allocating IP information in the background. {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 829.835212] env[62627]: DEBUG nova.network.neutron [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: aa181094-cb78-44e0-8d7a-1d677e0af1fe] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 829.871115] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cc6eb1f9-32f9-485d-9db7-a375a64b1299 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 829.880670] env[62627]: DEBUG nova.policy [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3a895c08a57d4396ac7fa5f4ad8642a7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2d41b3fa69e2473fbea79b0ec52108fa', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 829.882589] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d2df91bd-7efc-46d9-9c39-f6d0d85eee8d tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Acquiring lock "95ac4fbf-071a-403e-8cae-88d9899757ba" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 829.882746] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d2df91bd-7efc-46d9-9c39-f6d0d85eee8d tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Lock "95ac4fbf-071a-403e-8cae-88d9899757ba" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 829.882931] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d2df91bd-7efc-46d9-9c39-f6d0d85eee8d tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Acquiring lock "95ac4fbf-071a-403e-8cae-88d9899757ba-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 829.883162] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d2df91bd-7efc-46d9-9c39-f6d0d85eee8d tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Lock "95ac4fbf-071a-403e-8cae-88d9899757ba-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 829.883357] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d2df91bd-7efc-46d9-9c39-f6d0d85eee8d tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Lock "95ac4fbf-071a-403e-8cae-88d9899757ba-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 829.885691] env[62627]: INFO nova.compute.manager [None req-d2df91bd-7efc-46d9-9c39-f6d0d85eee8d tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] [instance: 95ac4fbf-071a-403e-8cae-88d9899757ba] Terminating instance [ 830.060110] env[62627]: DEBUG oslo_vmware.api [None req-283edade-b2c2-46cb-aeec-7eeafacf7be6 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Task: {'id': task-2194232, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.275344} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.060110] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-283edade-b2c2-46cb-aeec-7eeafacf7be6 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 830.060466] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-283edade-b2c2-46cb-aeec-7eeafacf7be6 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: f09c685d-9103-4c94-a44a-266aacc334f3] Deleted contents of the VM from datastore datastore1 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 830.060796] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-283edade-b2c2-46cb-aeec-7eeafacf7be6 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: f09c685d-9103-4c94-a44a-266aacc334f3] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 830.061106] env[62627]: INFO nova.compute.manager [None req-283edade-b2c2-46cb-aeec-7eeafacf7be6 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] [instance: f09c685d-9103-4c94-a44a-266aacc334f3] Took 3.37 seconds to destroy the instance on the hypervisor. [ 830.061460] env[62627]: DEBUG oslo.service.loopingcall [None req-283edade-b2c2-46cb-aeec-7eeafacf7be6 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 830.061786] env[62627]: DEBUG nova.compute.manager [-] [instance: f09c685d-9103-4c94-a44a-266aacc334f3] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 830.061989] env[62627]: DEBUG nova.network.neutron [-] [instance: f09c685d-9103-4c94-a44a-266aacc334f3] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 830.072415] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Acquiring lock "refresh_cache-241311b0-ba81-41d1-a9b6-cd40d04dbe3c" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 830.072676] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Acquired lock "refresh_cache-241311b0-ba81-41d1-a9b6-cd40d04dbe3c" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.072939] env[62627]: DEBUG nova.network.neutron [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 830.265023] env[62627]: DEBUG oslo_vmware.api [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Task: {'id': task-2194231, 'name': ExtendVirtualDisk_Task, 'duration_secs': 1.539633} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.265023] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] [instance: cbd30c37-f6c3-4229-8b95-855cab64924c] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 830.265023] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b4bbef9-1ce0-4596-992c-9787580f3423 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.293660] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] [instance: cbd30c37-f6c3-4229-8b95-855cab64924c] Reconfiguring VM instance instance-0000001f to attach disk [datastore2] cbd30c37-f6c3-4229-8b95-855cab64924c/cbd30c37-f6c3-4229-8b95-855cab64924c.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 830.294018] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4757bf39-0cda-404e-a0fe-2f10c86f8ace {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.315165] env[62627]: DEBUG oslo_vmware.api [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Waiting for the task: (returnval){ [ 830.315165] env[62627]: value = "task-2194233" [ 830.315165] env[62627]: _type = "Task" [ 830.315165] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.323717] env[62627]: DEBUG oslo_vmware.api [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Task: {'id': task-2194233, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.333527] env[62627]: DEBUG nova.compute.manager [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: aa181094-cb78-44e0-8d7a-1d677e0af1fe] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 830.342350] env[62627]: DEBUG nova.network.neutron [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: aa181094-cb78-44e0-8d7a-1d677e0af1fe] Successfully created port: 4eb9bc28-5e4b-4b2e-859d-31b930210e13 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 830.388884] env[62627]: DEBUG nova.compute.manager [None req-d2df91bd-7efc-46d9-9c39-f6d0d85eee8d tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] [instance: 95ac4fbf-071a-403e-8cae-88d9899757ba] Start destroying the instance on the hypervisor. 
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 830.389177] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d2df91bd-7efc-46d9-9c39-f6d0d85eee8d tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] [instance: 95ac4fbf-071a-403e-8cae-88d9899757ba] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 830.390356] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-817d478c-ee85-4898-a165-da9fc3dc28b1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.401708] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2df91bd-7efc-46d9-9c39-f6d0d85eee8d tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] [instance: 95ac4fbf-071a-403e-8cae-88d9899757ba] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 830.402680] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-933170c9-800b-464f-b943-d774b7fa68bc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.408057] env[62627]: DEBUG oslo_vmware.api [None req-d2df91bd-7efc-46d9-9c39-f6d0d85eee8d tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Waiting for the task: (returnval){ [ 830.408057] env[62627]: value = "task-2194234" [ 830.408057] env[62627]: _type = "Task" [ 830.408057] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.418872] env[62627]: DEBUG oslo_vmware.api [None req-d2df91bd-7efc-46d9-9c39-f6d0d85eee8d tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Task: {'id': task-2194234, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.630922] env[62627]: DEBUG nova.network.neutron [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 830.826996] env[62627]: DEBUG oslo_vmware.api [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Task: {'id': task-2194233, 'name': ReconfigVM_Task, 'duration_secs': 0.49956} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.827310] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] [instance: cbd30c37-f6c3-4229-8b95-855cab64924c] Reconfigured VM instance instance-0000001f to attach disk [datastore2] cbd30c37-f6c3-4229-8b95-855cab64924c/cbd30c37-f6c3-4229-8b95-855cab64924c.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 830.828031] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9b9b9282-1cd5-4a06-8c6e-630de3f268f2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.841722] env[62627]: DEBUG oslo_vmware.api [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Waiting for the task: (returnval){ [ 830.841722] env[62627]: value = "task-2194235" [ 830.841722] env[62627]: _type = "Task" [ 830.841722] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.863745] env[62627]: DEBUG oslo_vmware.api [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Task: {'id': task-2194235, 'name': Rename_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.921388] env[62627]: DEBUG oslo_vmware.api [None req-d2df91bd-7efc-46d9-9c39-f6d0d85eee8d tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Task: {'id': task-2194234, 'name': PowerOffVM_Task, 'duration_secs': 0.20638} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.922773] env[62627]: DEBUG nova.network.neutron [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Updating instance_info_cache with network_info: [{"id": "ddd3e21b-1460-4da2-bfcc-258303d2719e", "address": "fa:16:3e:e3:64:4d", "network": {"id": "0b8ce5a6-9abc-4473-b558-a4e9e5c7d741", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1874234589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d41b3fa69e2473fbea79b0ec52108fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc9714ff-7109-4ea1-9435-b2b3fbdb9e81", "external-id": "nsx-vlan-transportzone-887", "segmentation_id": 887, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapddd3e21b-14", "ovs_interfaceid": "ddd3e21b-1460-4da2-bfcc-258303d2719e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 830.923332] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2df91bd-7efc-46d9-9c39-f6d0d85eee8d tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] [instance: 95ac4fbf-071a-403e-8cae-88d9899757ba] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 830.923502] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d2df91bd-7efc-46d9-9c39-f6d0d85eee8d tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] [instance: 95ac4fbf-071a-403e-8cae-88d9899757ba] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 830.923778] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4fdc4e32-fb3f-415a-8c6b-61918e15f7fc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.983722] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d2df91bd-7efc-46d9-9c39-f6d0d85eee8d tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] [instance: 95ac4fbf-071a-403e-8cae-88d9899757ba] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 830.984056] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d2df91bd-7efc-46d9-9c39-f6d0d85eee8d tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] [instance: 95ac4fbf-071a-403e-8cae-88d9899757ba] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 830.984279] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2df91bd-7efc-46d9-9c39-f6d0d85eee8d 
tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Deleting the datastore file [datastore2] 95ac4fbf-071a-403e-8cae-88d9899757ba {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 830.984635] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-da93416e-c9bc-4fb3-9367-f16156916150 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.991368] env[62627]: DEBUG nova.network.neutron [-] [instance: f09c685d-9103-4c94-a44a-266aacc334f3] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 830.994438] env[62627]: DEBUG oslo_vmware.api [None req-d2df91bd-7efc-46d9-9c39-f6d0d85eee8d tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Waiting for the task: (returnval){ [ 830.994438] env[62627]: value = "task-2194237" [ 830.994438] env[62627]: _type = "Task" [ 830.994438] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.003605] env[62627]: DEBUG oslo_vmware.api [None req-d2df91bd-7efc-46d9-9c39-f6d0d85eee8d tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Task: {'id': task-2194237, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.040486] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dec30704-b46c-47b0-ab98-317ae7131518 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.049508] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adbbbf60-331c-4f22-9631-4ba07b6d44b4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.081760] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0159a2b-6742-46f1-972e-d6e8b4246f3a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.089625] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2984354f-72f4-4f5b-84ed-6b0d62d2abc1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.105059] env[62627]: DEBUG nova.compute.provider_tree [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 831.344523] env[62627]: DEBUG nova.compute.manager [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: aa181094-cb78-44e0-8d7a-1d677e0af1fe] Start spawning the instance on the hypervisor. 
{{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 831.356471] env[62627]: DEBUG oslo_vmware.api [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Task: {'id': task-2194235, 'name': Rename_Task, 'duration_secs': 0.141453} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.356748] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] [instance: cbd30c37-f6c3-4229-8b95-855cab64924c] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 831.356987] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b2b43e8d-7cbf-46a3-9be0-73308bf489d5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.366243] env[62627]: DEBUG nova.virt.hardware [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 831.366494] env[62627]: DEBUG nova.virt.hardware [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 831.366651] env[62627]: DEBUG nova.virt.hardware [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 831.366887] env[62627]: DEBUG nova.virt.hardware [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 831.366974] env[62627]: DEBUG nova.virt.hardware [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 831.367240] env[62627]: DEBUG nova.virt.hardware [None 
req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 831.367465] env[62627]: DEBUG nova.virt.hardware [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 831.367626] env[62627]: DEBUG nova.virt.hardware [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 831.367828] env[62627]: DEBUG nova.virt.hardware [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 831.368009] env[62627]: DEBUG nova.virt.hardware [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 831.368194] env[62627]: DEBUG nova.virt.hardware [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 831.369039] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b295848-dc5e-406e-ba2d-55bc4ce9e91f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.373524] env[62627]: DEBUG oslo_vmware.api [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Waiting for the task: (returnval){ [ 831.373524] env[62627]: value = "task-2194238" [ 831.373524] env[62627]: _type = "Task" [ 831.373524] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.380009] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b599b34a-54d5-42e1-b368-f676c97e5f2c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.389827] env[62627]: DEBUG oslo_vmware.api [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Task: {'id': task-2194238, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.428041] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Releasing lock "refresh_cache-241311b0-ba81-41d1-a9b6-cd40d04dbe3c" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 831.428205] env[62627]: DEBUG nova.compute.manager [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Instance network_info: |[{"id": "ddd3e21b-1460-4da2-bfcc-258303d2719e", "address": "fa:16:3e:e3:64:4d", "network": {"id": "0b8ce5a6-9abc-4473-b558-a4e9e5c7d741", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1874234589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d41b3fa69e2473fbea79b0ec52108fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc9714ff-7109-4ea1-9435-b2b3fbdb9e81", "external-id": "nsx-vlan-transportzone-887", "segmentation_id": 887, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapddd3e21b-14", "ovs_interfaceid": "ddd3e21b-1460-4da2-bfcc-258303d2719e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 831.428500] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e3:64:4d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bc9714ff-7109-4ea1-9435-b2b3fbdb9e81', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ddd3e21b-1460-4da2-bfcc-258303d2719e', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 831.436440] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Creating folder: Project (2d41b3fa69e2473fbea79b0ec52108fa). Parent ref: group-v447541. 
{{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 831.436791] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7d8bfedb-559a-4502-a655-abbbda794f60 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.455062] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Created folder: Project (2d41b3fa69e2473fbea79b0ec52108fa) in parent group-v447541. [ 831.455062] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Creating folder: Instances. Parent ref: group-v447629. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 831.455062] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-17dc6f55-48b0-4338-ac10-62287d6ffc52 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.466384] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Created folder: Instances in parent group-v447629. [ 831.466705] env[62627]: DEBUG oslo.service.loopingcall [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 831.466989] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 831.467261] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ab3bb9b9-06c8-4a12-8070-a9c9ce77d2a3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.488082] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 831.488082] env[62627]: value = "task-2194241" [ 831.488082] env[62627]: _type = "Task" [ 831.488082] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.496682] env[62627]: INFO nova.compute.manager [-] [instance: f09c685d-9103-4c94-a44a-266aacc334f3] Took 1.43 seconds to deallocate network for instance. [ 831.497058] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194241, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.508884] env[62627]: DEBUG oslo_vmware.api [None req-d2df91bd-7efc-46d9-9c39-f6d0d85eee8d tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Task: {'id': task-2194237, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.186324} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.509131] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2df91bd-7efc-46d9-9c39-f6d0d85eee8d tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 831.509311] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d2df91bd-7efc-46d9-9c39-f6d0d85eee8d tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] [instance: 95ac4fbf-071a-403e-8cae-88d9899757ba] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 831.509483] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d2df91bd-7efc-46d9-9c39-f6d0d85eee8d tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] [instance: 95ac4fbf-071a-403e-8cae-88d9899757ba] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 831.509651] env[62627]: INFO nova.compute.manager [None req-d2df91bd-7efc-46d9-9c39-f6d0d85eee8d tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] [instance: 95ac4fbf-071a-403e-8cae-88d9899757ba] Took 1.12 seconds to destroy the instance on the hypervisor. [ 831.509890] env[62627]: DEBUG oslo.service.loopingcall [None req-d2df91bd-7efc-46d9-9c39-f6d0d85eee8d tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 831.510170] env[62627]: DEBUG nova.compute.manager [-] [instance: 95ac4fbf-071a-403e-8cae-88d9899757ba] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 831.510259] env[62627]: DEBUG nova.network.neutron [-] [instance: 95ac4fbf-071a-403e-8cae-88d9899757ba] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 831.543188] env[62627]: DEBUG nova.compute.manager [req-ed53dc18-9cd0-44f1-933b-cf7c73aab7c0 req-15333927-7c86-4983-a8ef-c4b094b2e835 service nova] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Received event network-vif-plugged-ddd3e21b-1460-4da2-bfcc-258303d2719e {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 831.543407] env[62627]: DEBUG oslo_concurrency.lockutils [req-ed53dc18-9cd0-44f1-933b-cf7c73aab7c0 req-15333927-7c86-4983-a8ef-c4b094b2e835 service nova] Acquiring lock "241311b0-ba81-41d1-a9b6-cd40d04dbe3c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 831.543660] env[62627]: DEBUG oslo_concurrency.lockutils [req-ed53dc18-9cd0-44f1-933b-cf7c73aab7c0 req-15333927-7c86-4983-a8ef-c4b094b2e835 service nova] Lock "241311b0-ba81-41d1-a9b6-cd40d04dbe3c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 831.543879] env[62627]: DEBUG oslo_concurrency.lockutils [req-ed53dc18-9cd0-44f1-933b-cf7c73aab7c0 req-15333927-7c86-4983-a8ef-c4b094b2e835 service nova] Lock "241311b0-ba81-41d1-a9b6-cd40d04dbe3c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 831.544106] env[62627]: DEBUG nova.compute.manager [req-ed53dc18-9cd0-44f1-933b-cf7c73aab7c0 req-15333927-7c86-4983-a8ef-c4b094b2e835 service nova] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] No waiting events found dispatching network-vif-plugged-ddd3e21b-1460-4da2-bfcc-258303d2719e {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 831.544324] env[62627]: WARNING nova.compute.manager [req-ed53dc18-9cd0-44f1-933b-cf7c73aab7c0 req-15333927-7c86-4983-a8ef-c4b094b2e835 service nova] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Received unexpected event network-vif-plugged-ddd3e21b-1460-4da2-bfcc-258303d2719e for instance with vm_state building and task_state spawning. [ 831.544577] env[62627]: DEBUG nova.compute.manager [req-ed53dc18-9cd0-44f1-933b-cf7c73aab7c0 req-15333927-7c86-4983-a8ef-c4b094b2e835 service nova] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Received event network-changed-ddd3e21b-1460-4da2-bfcc-258303d2719e {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 831.544767] env[62627]: DEBUG nova.compute.manager [req-ed53dc18-9cd0-44f1-933b-cf7c73aab7c0 req-15333927-7c86-4983-a8ef-c4b094b2e835 service nova] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Refreshing instance network info cache due to event network-changed-ddd3e21b-1460-4da2-bfcc-258303d2719e. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 831.544934] env[62627]: DEBUG oslo_concurrency.lockutils [req-ed53dc18-9cd0-44f1-933b-cf7c73aab7c0 req-15333927-7c86-4983-a8ef-c4b094b2e835 service nova] Acquiring lock "refresh_cache-241311b0-ba81-41d1-a9b6-cd40d04dbe3c" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 831.545083] env[62627]: DEBUG oslo_concurrency.lockutils [req-ed53dc18-9cd0-44f1-933b-cf7c73aab7c0 req-15333927-7c86-4983-a8ef-c4b094b2e835 service nova] Acquired lock "refresh_cache-241311b0-ba81-41d1-a9b6-cd40d04dbe3c" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 831.545246] env[62627]: DEBUG nova.network.neutron [req-ed53dc18-9cd0-44f1-933b-cf7c73aab7c0 req-15333927-7c86-4983-a8ef-c4b094b2e835 service nova] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Refreshing network info cache for port ddd3e21b-1460-4da2-bfcc-258303d2719e {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 831.609194] env[62627]: DEBUG nova.scheduler.client.report [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 831.884039] env[62627]: DEBUG oslo_vmware.api [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Task: {'id': task-2194238, 'name': PowerOnVM_Task, 'duration_secs': 0.514498} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.884358] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] [instance: cbd30c37-f6c3-4229-8b95-855cab64924c] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 831.884517] env[62627]: INFO nova.compute.manager [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] [instance: cbd30c37-f6c3-4229-8b95-855cab64924c] Took 9.62 seconds to spawn the instance on the hypervisor. 
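The PowerOnVM_Task, SearchDatastore_Task and CreateVM_Task entries above all follow oslo.vmware's submit-then-poll pattern: the driver invokes a vCenter task through the API session and then blocks in wait_for_task, which emits the "progress is N%" / "completed successfully" lines seen here. A minimal sketch of that pattern follows; the host, credentials and VM reference are hypothetical placeholders, not values taken from this log.

    # Sketch of the oslo.vmware submit-then-poll pattern; all values are
    # placeholders, assuming a reachable vCenter and an existing VM ref.
    from oslo_vmware import api as vmware_api

    session = vmware_api.VMwareAPISession(
        'vc.example.test',   # hypothetical vCenter host
        'svc-nova',          # hypothetical username
        'secret',            # hypothetical password
        10,                  # api_retry_count
        0.5,                 # task_poll_interval, seconds between polls
    )

    def power_on(vm_ref):
        # Submit the vCenter task, then poll until it reaches a terminal state.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        return session.wait_for_task(task)   # raises if the task errors out
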
[ 831.884744] env[62627]: DEBUG nova.compute.manager [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] [instance: cbd30c37-f6c3-4229-8b95-855cab64924c] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 831.885731] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3935d1c0-cdcc-4cb6-b5c0-0879a160dc16 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.999559] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194241, 'name': CreateVM_Task, 'duration_secs': 0.353045} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.000104] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 832.000813] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 832.001155] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 832.001417] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 832.001624] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-970b753b-4dcc-42a6-b2d1-780280470c44 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.006835] env[62627]: DEBUG oslo_concurrency.lockutils [None req-283edade-b2c2-46cb-aeec-7eeafacf7be6 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 832.007206] env[62627]: DEBUG oslo_vmware.api [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for the task: (returnval){ [ 832.007206] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]520f5490-2ab4-0d57-d6db-0cce274364ad" [ 832.007206] env[62627]: _type = "Task" [ 832.007206] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.015340] env[62627]: DEBUG oslo_vmware.api [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]520f5490-2ab4-0d57-d6db-0cce274364ad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.117779] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.795s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 832.118298] env[62627]: DEBUG nova.compute.manager [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 832.120913] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.367s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 832.122694] env[62627]: INFO nova.compute.claims [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: aed62160-2db5-4008-a491-2b27ae81ef5f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 832.300346] env[62627]: DEBUG nova.network.neutron [-] [instance: 95ac4fbf-071a-403e-8cae-88d9899757ba] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 832.366133] env[62627]: DEBUG nova.network.neutron [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: aa181094-cb78-44e0-8d7a-1d677e0af1fe] Successfully updated port: 4eb9bc28-5e4b-4b2e-859d-31b930210e13 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 832.406180] env[62627]: INFO nova.compute.manager [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] [instance: cbd30c37-f6c3-4229-8b95-855cab64924c] Took 39.40 seconds to build instance. [ 832.425226] env[62627]: DEBUG nova.network.neutron [req-ed53dc18-9cd0-44f1-933b-cf7c73aab7c0 req-15333927-7c86-4983-a8ef-c4b094b2e835 service nova] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Updated VIF entry in instance network info cache for port ddd3e21b-1460-4da2-bfcc-258303d2719e. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 832.425578] env[62627]: DEBUG nova.network.neutron [req-ed53dc18-9cd0-44f1-933b-cf7c73aab7c0 req-15333927-7c86-4983-a8ef-c4b094b2e835 service nova] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Updating instance_info_cache with network_info: [{"id": "ddd3e21b-1460-4da2-bfcc-258303d2719e", "address": "fa:16:3e:e3:64:4d", "network": {"id": "0b8ce5a6-9abc-4473-b558-a4e9e5c7d741", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1874234589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d41b3fa69e2473fbea79b0ec52108fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc9714ff-7109-4ea1-9435-b2b3fbdb9e81", "external-id": "nsx-vlan-transportzone-887", "segmentation_id": 887, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapddd3e21b-14", "ovs_interfaceid": "ddd3e21b-1460-4da2-bfcc-258303d2719e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 832.518075] env[62627]: DEBUG oslo_vmware.api [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]520f5490-2ab4-0d57-d6db-0cce274364ad, 'name': SearchDatastore_Task, 'duration_secs': 0.011131} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.518379] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 832.518608] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 832.518841] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 832.518982] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 832.519174] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 832.519437] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7b02f61c-1204-4812-a821-10f02fbc279b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.528218] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 832.528403] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 832.529154] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c14c2204-0c44-4709-a8f9-37c83aead54e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.534856] env[62627]: DEBUG oslo_vmware.api [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for the task: (returnval){ [ 832.534856] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]526e80e2-8ef4-da2b-0985-e539edc78e28" [ 832.534856] env[62627]: _type = "Task" [ 832.534856] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.543556] env[62627]: DEBUG oslo_vmware.api [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]526e80e2-8ef4-da2b-0985-e539edc78e28, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.630140] env[62627]: DEBUG nova.compute.utils [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 832.631667] env[62627]: DEBUG nova.compute.manager [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Allocating IP information in the background. {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 832.631877] env[62627]: DEBUG nova.network.neutron [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 832.695360] env[62627]: DEBUG nova.policy [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4e513ed7742d48038a96de3832c175c2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8087909554564a15a99bb0d60937d82b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 832.802097] env[62627]: INFO nova.compute.manager [-] [instance: 95ac4fbf-071a-403e-8cae-88d9899757ba] Took 1.29 seconds to deallocate network for instance. 
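The "Acquiring lock" / "acquired by" / "released by" lines around the image-cache path and the compute_resources claims are produced by oslo.concurrency's lockutils helpers. A minimal sketch of the two forms that generate those messages is shown below; the lock names mirror the log, while the function bodies are placeholders.

    # Sketch of the oslo.concurrency locking behind the messages above.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def instance_claim():
        # Runs with the named lock held; the decorator logs the
        # "acquired by ... waited Ns" / "released by ... held Ns" lines.
        pass

    def refresh_cache(instance_uuid):
        # Context-manager form, as used for the refresh_cache-<uuid> locks.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass
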
[ 832.871560] env[62627]: DEBUG oslo_concurrency.lockutils [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Acquiring lock "refresh_cache-aa181094-cb78-44e0-8d7a-1d677e0af1fe" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 832.871560] env[62627]: DEBUG oslo_concurrency.lockutils [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Acquired lock "refresh_cache-aa181094-cb78-44e0-8d7a-1d677e0af1fe" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 832.871847] env[62627]: DEBUG nova.network.neutron [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: aa181094-cb78-44e0-8d7a-1d677e0af1fe] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 832.908367] env[62627]: DEBUG oslo_concurrency.lockutils [None req-87da9a5d-3b3c-49e7-8b20-261630332a0d tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Lock "cbd30c37-f6c3-4229-8b95-855cab64924c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 77.024s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 832.928606] env[62627]: DEBUG oslo_concurrency.lockutils [req-ed53dc18-9cd0-44f1-933b-cf7c73aab7c0 req-15333927-7c86-4983-a8ef-c4b094b2e835 service nova] Releasing lock "refresh_cache-241311b0-ba81-41d1-a9b6-cd40d04dbe3c" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 832.928880] env[62627]: DEBUG nova.compute.manager [req-ed53dc18-9cd0-44f1-933b-cf7c73aab7c0 req-15333927-7c86-4983-a8ef-c4b094b2e835 service nova] [instance: f09c685d-9103-4c94-a44a-266aacc334f3] Received event network-vif-deleted-6d227bba-fdfe-4963-87cf-9c8bc5e7d9ee {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 833.048181] env[62627]: DEBUG oslo_vmware.api [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]526e80e2-8ef4-da2b-0985-e539edc78e28, 'name': SearchDatastore_Task, 'duration_secs': 0.010717} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.048942] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-707da174-e7cf-4a86-94b0-3ef31c1aa95e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.056045] env[62627]: DEBUG oslo_vmware.api [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for the task: (returnval){ [ 833.056045] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5283a629-efb8-8354-1a0c-a95ed8d94710" [ 833.056045] env[62627]: _type = "Task" [ 833.056045] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.064941] env[62627]: DEBUG oslo_vmware.api [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5283a629-efb8-8354-1a0c-a95ed8d94710, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.137477] env[62627]: DEBUG nova.network.neutron [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Successfully created port: abd8176d-4f10-4dcc-9b49-f53a48ec4cc4 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 833.139418] env[62627]: DEBUG nova.compute.manager [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 833.311514] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d2df91bd-7efc-46d9-9c39-f6d0d85eee8d tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 833.414366] env[62627]: DEBUG nova.compute.manager [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 833.439878] env[62627]: DEBUG nova.network.neutron [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: aa181094-cb78-44e0-8d7a-1d677e0af1fe] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 833.568378] env[62627]: DEBUG oslo_vmware.api [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5283a629-efb8-8354-1a0c-a95ed8d94710, 'name': SearchDatastore_Task, 'duration_secs': 0.010542} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.571201] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 833.572422] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 241311b0-ba81-41d1-a9b6-cd40d04dbe3c/241311b0-ba81-41d1-a9b6-cd40d04dbe3c.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 833.572787] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b0718e20-5750-4a56-b79b-7a8f32953830 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.580199] env[62627]: DEBUG oslo_vmware.api [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for the task: (returnval){ [ 833.580199] env[62627]: value = "task-2194242" [ 833.580199] env[62627]: _type = "Task" [ 833.580199] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.589327] env[62627]: DEBUG oslo_vmware.api [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194242, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.813065] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d5474d12-367e-4429-9f89-592f2e3a2db8 tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Acquiring lock "cbd30c37-f6c3-4229-8b95-855cab64924c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 833.813065] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d5474d12-367e-4429-9f89-592f2e3a2db8 tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Lock "cbd30c37-f6c3-4229-8b95-855cab64924c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 833.813065] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d5474d12-367e-4429-9f89-592f2e3a2db8 tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Acquiring lock "cbd30c37-f6c3-4229-8b95-855cab64924c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 833.813065] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d5474d12-367e-4429-9f89-592f2e3a2db8 tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Lock "cbd30c37-f6c3-4229-8b95-855cab64924c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 833.813509] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d5474d12-367e-4429-9f89-592f2e3a2db8 tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Lock "cbd30c37-f6c3-4229-8b95-855cab64924c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 833.814049] env[62627]: INFO nova.compute.manager [None req-d5474d12-367e-4429-9f89-592f2e3a2db8 tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] [instance: cbd30c37-f6c3-4229-8b95-855cab64924c] Terminating instance [ 833.832729] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-000f0ab0-5ccc-4829-a9e1-741fede79e2c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.843287] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7f47291-bfa1-4cab-9dee-40e0b964d010 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.888426] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b5a076d-cd06-45f0-9e97-ca734c3c5c4b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
833.895583] env[62627]: DEBUG nova.compute.manager [req-020b5e5d-424b-4d47-8490-05d571d6dd25 req-f6c66c8b-d174-4da9-9c0c-d6b9f15cd72c service nova] [instance: 95ac4fbf-071a-403e-8cae-88d9899757ba] Received event network-vif-deleted-d126d7bd-d076-4411-8929-af4270455b6d {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 833.895832] env[62627]: DEBUG nova.compute.manager [req-020b5e5d-424b-4d47-8490-05d571d6dd25 req-f6c66c8b-d174-4da9-9c0c-d6b9f15cd72c service nova] [instance: aa181094-cb78-44e0-8d7a-1d677e0af1fe] Received event network-vif-plugged-4eb9bc28-5e4b-4b2e-859d-31b930210e13 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 833.896074] env[62627]: DEBUG oslo_concurrency.lockutils [req-020b5e5d-424b-4d47-8490-05d571d6dd25 req-f6c66c8b-d174-4da9-9c0c-d6b9f15cd72c service nova] Acquiring lock "aa181094-cb78-44e0-8d7a-1d677e0af1fe-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 833.896287] env[62627]: DEBUG oslo_concurrency.lockutils [req-020b5e5d-424b-4d47-8490-05d571d6dd25 req-f6c66c8b-d174-4da9-9c0c-d6b9f15cd72c service nova] Lock "aa181094-cb78-44e0-8d7a-1d677e0af1fe-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 833.896453] env[62627]: DEBUG oslo_concurrency.lockutils [req-020b5e5d-424b-4d47-8490-05d571d6dd25 req-f6c66c8b-d174-4da9-9c0c-d6b9f15cd72c service nova] Lock "aa181094-cb78-44e0-8d7a-1d677e0af1fe-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 833.896614] env[62627]: DEBUG nova.compute.manager [req-020b5e5d-424b-4d47-8490-05d571d6dd25 req-f6c66c8b-d174-4da9-9c0c-d6b9f15cd72c service nova] [instance: aa181094-cb78-44e0-8d7a-1d677e0af1fe] No waiting events found dispatching network-vif-plugged-4eb9bc28-5e4b-4b2e-859d-31b930210e13 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 833.896771] env[62627]: WARNING nova.compute.manager [req-020b5e5d-424b-4d47-8490-05d571d6dd25 req-f6c66c8b-d174-4da9-9c0c-d6b9f15cd72c service nova] [instance: aa181094-cb78-44e0-8d7a-1d677e0af1fe] Received unexpected event network-vif-plugged-4eb9bc28-5e4b-4b2e-859d-31b930210e13 for instance with vm_state building and task_state spawning. [ 833.896980] env[62627]: DEBUG nova.compute.manager [req-020b5e5d-424b-4d47-8490-05d571d6dd25 req-f6c66c8b-d174-4da9-9c0c-d6b9f15cd72c service nova] [instance: aa181094-cb78-44e0-8d7a-1d677e0af1fe] Received event network-changed-4eb9bc28-5e4b-4b2e-859d-31b930210e13 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 833.897150] env[62627]: DEBUG nova.compute.manager [req-020b5e5d-424b-4d47-8490-05d571d6dd25 req-f6c66c8b-d174-4da9-9c0c-d6b9f15cd72c service nova] [instance: aa181094-cb78-44e0-8d7a-1d677e0af1fe] Refreshing instance network info cache due to event network-changed-4eb9bc28-5e4b-4b2e-859d-31b930210e13. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 833.897365] env[62627]: DEBUG oslo_concurrency.lockutils [req-020b5e5d-424b-4d47-8490-05d571d6dd25 req-f6c66c8b-d174-4da9-9c0c-d6b9f15cd72c service nova] Acquiring lock "refresh_cache-aa181094-cb78-44e0-8d7a-1d677e0af1fe" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 833.902957] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44e437d7-c883-4f9f-81bc-5c2090596e1c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.924159] env[62627]: DEBUG nova.compute.provider_tree [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Updating inventory in ProviderTree for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 833.957716] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 833.969716] env[62627]: DEBUG nova.network.neutron [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: aa181094-cb78-44e0-8d7a-1d677e0af1fe] Updating instance_info_cache with network_info: [{"id": "4eb9bc28-5e4b-4b2e-859d-31b930210e13", "address": "fa:16:3e:fc:b7:08", "network": {"id": "0b8ce5a6-9abc-4473-b558-a4e9e5c7d741", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1874234589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d41b3fa69e2473fbea79b0ec52108fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc9714ff-7109-4ea1-9435-b2b3fbdb9e81", "external-id": "nsx-vlan-transportzone-887", "segmentation_id": 887, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4eb9bc28-5e", "ovs_interfaceid": "4eb9bc28-5e4b-4b2e-859d-31b930210e13", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 834.092718] env[62627]: DEBUG oslo_vmware.api [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e 
tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194242, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.149633] env[62627]: DEBUG nova.compute.manager [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Start spawning the instance on the hypervisor. {{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 834.178895] env[62627]: DEBUG nova.virt.hardware [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 834.178895] env[62627]: DEBUG nova.virt.hardware [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 834.178895] env[62627]: DEBUG nova.virt.hardware [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 834.179194] env[62627]: DEBUG nova.virt.hardware [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 834.179194] env[62627]: DEBUG nova.virt.hardware [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 834.179194] env[62627]: DEBUG nova.virt.hardware [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 834.179301] env[62627]: DEBUG nova.virt.hardware [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 
tempest-ListImageFiltersTestJSON-306267884-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 834.179511] env[62627]: DEBUG nova.virt.hardware [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 834.179697] env[62627]: DEBUG nova.virt.hardware [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 834.179862] env[62627]: DEBUG nova.virt.hardware [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 834.180203] env[62627]: DEBUG nova.virt.hardware [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 834.181114] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56840439-9de4-4493-b49b-884dc2586329 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.189388] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52a087ff-4e39-44a1-8370-735e3513ed27 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.320026] env[62627]: DEBUG nova.compute.manager [None req-d5474d12-367e-4429-9f89-592f2e3a2db8 tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] [instance: cbd30c37-f6c3-4229-8b95-855cab64924c] Start destroying the instance on the hypervisor. 
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 834.320026] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d5474d12-367e-4429-9f89-592f2e3a2db8 tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] [instance: cbd30c37-f6c3-4229-8b95-855cab64924c] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 834.321007] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc9533e1-8d44-4dcd-a4c8-d29cec73452d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.330197] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5474d12-367e-4429-9f89-592f2e3a2db8 tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] [instance: cbd30c37-f6c3-4229-8b95-855cab64924c] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 834.330197] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-66376535-073a-4fbc-a142-8a95994bdb82 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.337919] env[62627]: DEBUG oslo_vmware.api [None req-d5474d12-367e-4429-9f89-592f2e3a2db8 tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Waiting for the task: (returnval){ [ 834.337919] env[62627]: value = "task-2194243" [ 834.337919] env[62627]: _type = "Task" [ 834.337919] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.347199] env[62627]: DEBUG oslo_vmware.api [None req-d5474d12-367e-4429-9f89-592f2e3a2db8 tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Task: {'id': task-2194243, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.449446] env[62627]: ERROR nova.scheduler.client.report [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [req-d702a09e-07c8-47da-818f-192258d24a77] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c17e7f32-68d8-4e2f-8979-15d701de02f7. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-d702a09e-07c8-47da-818f-192258d24a77"}]} [ 834.461483] env[62627]: DEBUG oslo_vmware.rw_handles [None req-0f2d6fcd-c542-44b2-a5fb-1bfd349ffe43 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b2f948-5220-1b56-3f16-0d70c6f410ec/disk-0.vmdk. 
{{(pid=62627) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 834.462450] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86a9bc59-1791-48ae-94c6-63237b03151f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.467871] env[62627]: DEBUG nova.scheduler.client.report [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Refreshing inventories for resource provider c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 834.471365] env[62627]: DEBUG oslo_vmware.rw_handles [None req-0f2d6fcd-c542-44b2-a5fb-1bfd349ffe43 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b2f948-5220-1b56-3f16-0d70c6f410ec/disk-0.vmdk is in state: ready. {{(pid=62627) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 834.471525] env[62627]: ERROR oslo_vmware.rw_handles [None req-0f2d6fcd-c542-44b2-a5fb-1bfd349ffe43 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b2f948-5220-1b56-3f16-0d70c6f410ec/disk-0.vmdk due to incomplete transfer. [ 834.471947] env[62627]: DEBUG oslo_concurrency.lockutils [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Releasing lock "refresh_cache-aa181094-cb78-44e0-8d7a-1d677e0af1fe" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 834.472293] env[62627]: DEBUG nova.compute.manager [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: aa181094-cb78-44e0-8d7a-1d677e0af1fe] Instance network_info: |[{"id": "4eb9bc28-5e4b-4b2e-859d-31b930210e13", "address": "fa:16:3e:fc:b7:08", "network": {"id": "0b8ce5a6-9abc-4473-b558-a4e9e5c7d741", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1874234589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d41b3fa69e2473fbea79b0ec52108fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc9714ff-7109-4ea1-9435-b2b3fbdb9e81", "external-id": "nsx-vlan-transportzone-887", "segmentation_id": 887, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4eb9bc28-5e", "ovs_interfaceid": "4eb9bc28-5e4b-4b2e-859d-31b930210e13", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 834.472492] env[62627]: DEBUG 
oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-20df24a3-8e41-40f9-8fbe-fa5114d181a3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.474545] env[62627]: DEBUG oslo_concurrency.lockutils [req-020b5e5d-424b-4d47-8490-05d571d6dd25 req-f6c66c8b-d174-4da9-9c0c-d6b9f15cd72c service nova] Acquired lock "refresh_cache-aa181094-cb78-44e0-8d7a-1d677e0af1fe" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 834.474820] env[62627]: DEBUG nova.network.neutron [req-020b5e5d-424b-4d47-8490-05d571d6dd25 req-f6c66c8b-d174-4da9-9c0c-d6b9f15cd72c service nova] [instance: aa181094-cb78-44e0-8d7a-1d677e0af1fe] Refreshing network info cache for port 4eb9bc28-5e4b-4b2e-859d-31b930210e13 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 834.476735] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: aa181094-cb78-44e0-8d7a-1d677e0af1fe] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fc:b7:08', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bc9714ff-7109-4ea1-9435-b2b3fbdb9e81', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4eb9bc28-5e4b-4b2e-859d-31b930210e13', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 834.484601] env[62627]: DEBUG oslo.service.loopingcall [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 834.485163] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aa181094-cb78-44e0-8d7a-1d677e0af1fe] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 834.487621] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8741d508-fbfe-4807-a8d7-1de1321bd8a5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.502830] env[62627]: DEBUG nova.scheduler.client.report [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Updating ProviderTree inventory for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 834.503057] env[62627]: DEBUG nova.compute.provider_tree [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Updating inventory in ProviderTree for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 834.506573] env[62627]: DEBUG oslo_vmware.rw_handles [None req-0f2d6fcd-c542-44b2-a5fb-1bfd349ffe43 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b2f948-5220-1b56-3f16-0d70c6f410ec/disk-0.vmdk. 
{{(pid=62627) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 834.506763] env[62627]: DEBUG nova.virt.vmwareapi.images [None req-0f2d6fcd-c542-44b2-a5fb-1bfd349ffe43 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Uploaded image 41a9293e-661f-4792-a6cc-b964646b15b8 to the Glance image server {{(pid=62627) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 834.509052] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f2d6fcd-c542-44b2-a5fb-1bfd349ffe43 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Destroying the VM {{(pid=62627) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 834.510024] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-089ac3f0-91a3-4abc-a6c2-8fefa0f047b1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.514847] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 834.514847] env[62627]: value = "task-2194244" [ 834.514847] env[62627]: _type = "Task" [ 834.514847] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.518868] env[62627]: DEBUG oslo_vmware.api [None req-0f2d6fcd-c542-44b2-a5fb-1bfd349ffe43 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Waiting for the task: (returnval){ [ 834.518868] env[62627]: value = "task-2194245" [ 834.518868] env[62627]: _type = "Task" [ 834.518868] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.519714] env[62627]: DEBUG nova.scheduler.client.report [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Refreshing aggregate associations for resource provider c17e7f32-68d8-4e2f-8979-15d701de02f7, aggregates: None {{(pid=62627) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 834.530358] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194244, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.535615] env[62627]: DEBUG oslo_vmware.api [None req-0f2d6fcd-c542-44b2-a5fb-1bfd349ffe43 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Task: {'id': task-2194245, 'name': Destroy_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.539550] env[62627]: DEBUG nova.scheduler.client.report [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Refreshing trait associations for resource provider c17e7f32-68d8-4e2f-8979-15d701de02f7, traits: COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62627) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 834.597601] env[62627]: DEBUG oslo_vmware.api [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194242, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.583365} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.597867] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 241311b0-ba81-41d1-a9b6-cd40d04dbe3c/241311b0-ba81-41d1-a9b6-cd40d04dbe3c.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 834.598116] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 834.598376] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-413fb2f2-797d-4574-8a19-d80f8147f1a5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.608605] env[62627]: DEBUG oslo_vmware.api [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for the task: (returnval){ [ 834.608605] env[62627]: value = "task-2194246" [ 834.608605] env[62627]: _type = "Task" [ 834.608605] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.617686] env[62627]: DEBUG oslo_vmware.api [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194246, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.849074] env[62627]: DEBUG oslo_vmware.api [None req-d5474d12-367e-4429-9f89-592f2e3a2db8 tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Task: {'id': task-2194243, 'name': PowerOffVM_Task, 'duration_secs': 0.206865} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.849358] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5474d12-367e-4429-9f89-592f2e3a2db8 tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] [instance: cbd30c37-f6c3-4229-8b95-855cab64924c] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 834.849543] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d5474d12-367e-4429-9f89-592f2e3a2db8 tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] [instance: cbd30c37-f6c3-4229-8b95-855cab64924c] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 834.849844] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-06e65cd3-022d-4c5a-bd91-0cb26cab000c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.919215] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d5474d12-367e-4429-9f89-592f2e3a2db8 tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] [instance: cbd30c37-f6c3-4229-8b95-855cab64924c] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 834.919447] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d5474d12-367e-4429-9f89-592f2e3a2db8 tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] [instance: cbd30c37-f6c3-4229-8b95-855cab64924c] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 834.919629] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5474d12-367e-4429-9f89-592f2e3a2db8 tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Deleting the datastore file [datastore2] cbd30c37-f6c3-4229-8b95-855cab64924c {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 834.922824] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-08d330d0-7949-4398-9cce-e0b528824db9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.929404] env[62627]: DEBUG oslo_vmware.api [None req-d5474d12-367e-4429-9f89-592f2e3a2db8 tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Waiting for the task: (returnval){ [ 834.929404] env[62627]: value = "task-2194248" [ 834.929404] env[62627]: _type = "Task" [ 834.929404] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.939450] env[62627]: DEBUG oslo_vmware.api [None req-d5474d12-367e-4429-9f89-592f2e3a2db8 tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Task: {'id': task-2194248, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.026737] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194244, 'name': CreateVM_Task, 'duration_secs': 0.382808} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.030313] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aa181094-cb78-44e0-8d7a-1d677e0af1fe] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 835.031812] env[62627]: DEBUG oslo_concurrency.lockutils [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 835.032134] env[62627]: DEBUG oslo_concurrency.lockutils [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 835.032723] env[62627]: DEBUG oslo_concurrency.lockutils [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 835.033407] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b531b897-c810-43e2-a4fe-5cf2c6350fd2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.038845] env[62627]: DEBUG oslo_vmware.api [None req-0f2d6fcd-c542-44b2-a5fb-1bfd349ffe43 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Task: {'id': task-2194245, 'name': Destroy_Task, 'duration_secs': 0.303796} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.045030] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-0f2d6fcd-c542-44b2-a5fb-1bfd349ffe43 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Destroyed the VM [ 835.045030] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-0f2d6fcd-c542-44b2-a5fb-1bfd349ffe43 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Deleting Snapshot of the VM instance {{(pid=62627) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 835.046254] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-ced62ddb-98ed-48e4-86e2-337eeadea5a8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.049372] env[62627]: DEBUG oslo_vmware.api [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for the task: (returnval){ [ 835.049372] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52907cec-1437-2afd-55bd-e9d401e7d5ac" [ 835.049372] env[62627]: _type = "Task" [ 835.049372] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.059165] env[62627]: DEBUG oslo_vmware.api [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52907cec-1437-2afd-55bd-e9d401e7d5ac, 'name': SearchDatastore_Task, 'duration_secs': 0.010414} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.062792] env[62627]: DEBUG oslo_concurrency.lockutils [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 835.063200] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: aa181094-cb78-44e0-8d7a-1d677e0af1fe] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 835.063567] env[62627]: DEBUG oslo_concurrency.lockutils [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 835.063863] env[62627]: DEBUG oslo_concurrency.lockutils [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 835.064193] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 835.064638] env[62627]: DEBUG oslo_vmware.api [None req-0f2d6fcd-c542-44b2-a5fb-1bfd349ffe43 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Waiting for the task: (returnval){ [ 835.064638] env[62627]: value = "task-2194249" [ 835.064638] env[62627]: _type = "Task" [ 835.064638] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.065242] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c3fa7285-163e-43c2-b631-0c1f13fc3a15 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.075302] env[62627]: DEBUG oslo_vmware.api [None req-0f2d6fcd-c542-44b2-a5fb-1bfd349ffe43 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Task: {'id': task-2194249, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.079020] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3c8dcd7-24f9-4b62-ab3c-20ae12f3aa55 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.080927] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 835.081244] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 835.082009] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-df9b3231-86ac-4a73-982b-b7db16e7554e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.089687] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f5227d6-95cb-4a92-a1fc-519af29158b7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.093982] env[62627]: DEBUG oslo_vmware.api [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for the task: (returnval){ [ 835.093982] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5256b712-e9fe-4269-1266-e078d529eaec" [ 835.093982] env[62627]: _type = "Task" [ 835.093982] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.130325] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db36b220-ecd1-48d8-97a1-97ef11359f57 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.137471] env[62627]: DEBUG oslo_vmware.api [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5256b712-e9fe-4269-1266-e078d529eaec, 'name': SearchDatastore_Task, 'duration_secs': 0.038949} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.139494] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba97bf37-86eb-4185-acf5-dcc98ff849ee {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.150018] env[62627]: DEBUG oslo_vmware.api [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194246, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080489} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.150018] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-957a1b99-b42c-417c-835e-88bedecbf0a3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.154179] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 835.156211] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82c57fc8-a62b-49da-b196-3a06e090fd34 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.158839] env[62627]: DEBUG oslo_vmware.api [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for the task: (returnval){ [ 835.158839] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52221fc6-c7ec-6c71-ad94-af3f3921aa41" [ 835.158839] env[62627]: _type = "Task" [ 835.158839] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.181625] env[62627]: DEBUG nova.compute.provider_tree [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 835.191816] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Reconfiguring VM instance instance-00000020 to attach disk [datastore2] 241311b0-ba81-41d1-a9b6-cd40d04dbe3c/241311b0-ba81-41d1-a9b6-cd40d04dbe3c.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 835.193382] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dad59332-499a-4196-af4a-73395e9f3ba5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.215482] env[62627]: DEBUG oslo_vmware.api [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52221fc6-c7ec-6c71-ad94-af3f3921aa41, 'name': SearchDatastore_Task, 'duration_secs': 0.009107} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.216304] env[62627]: DEBUG oslo_concurrency.lockutils [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 835.216498] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] aa181094-cb78-44e0-8d7a-1d677e0af1fe/aa181094-cb78-44e0-8d7a-1d677e0af1fe.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 835.217044] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a76bdd51-4b06-43fa-a5b6-a100e2513462 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.221346] env[62627]: DEBUG oslo_vmware.api [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for the task: (returnval){ [ 835.221346] env[62627]: value = "task-2194250" [ 835.221346] env[62627]: _type = "Task" [ 835.221346] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.225377] env[62627]: DEBUG oslo_vmware.api [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for the task: (returnval){ [ 835.225377] env[62627]: value = "task-2194251" [ 835.225377] env[62627]: _type = "Task" [ 835.225377] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.231833] env[62627]: DEBUG oslo_vmware.api [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194250, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.236362] env[62627]: DEBUG oslo_vmware.api [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194251, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.397533] env[62627]: DEBUG nova.network.neutron [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Successfully updated port: abd8176d-4f10-4dcc-9b49-f53a48ec4cc4 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 835.411936] env[62627]: DEBUG nova.network.neutron [req-020b5e5d-424b-4d47-8490-05d571d6dd25 req-f6c66c8b-d174-4da9-9c0c-d6b9f15cd72c service nova] [instance: aa181094-cb78-44e0-8d7a-1d677e0af1fe] Updated VIF entry in instance network info cache for port 4eb9bc28-5e4b-4b2e-859d-31b930210e13. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 835.411936] env[62627]: DEBUG nova.network.neutron [req-020b5e5d-424b-4d47-8490-05d571d6dd25 req-f6c66c8b-d174-4da9-9c0c-d6b9f15cd72c service nova] [instance: aa181094-cb78-44e0-8d7a-1d677e0af1fe] Updating instance_info_cache with network_info: [{"id": "4eb9bc28-5e4b-4b2e-859d-31b930210e13", "address": "fa:16:3e:fc:b7:08", "network": {"id": "0b8ce5a6-9abc-4473-b558-a4e9e5c7d741", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1874234589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d41b3fa69e2473fbea79b0ec52108fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc9714ff-7109-4ea1-9435-b2b3fbdb9e81", "external-id": "nsx-vlan-transportzone-887", "segmentation_id": 887, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4eb9bc28-5e", "ovs_interfaceid": "4eb9bc28-5e4b-4b2e-859d-31b930210e13", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 835.439115] env[62627]: DEBUG oslo_vmware.api [None req-d5474d12-367e-4429-9f89-592f2e3a2db8 tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Task: {'id': task-2194248, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.197548} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.439371] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5474d12-367e-4429-9f89-592f2e3a2db8 tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 835.439553] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d5474d12-367e-4429-9f89-592f2e3a2db8 tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] [instance: cbd30c37-f6c3-4229-8b95-855cab64924c] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 835.439728] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d5474d12-367e-4429-9f89-592f2e3a2db8 tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] [instance: cbd30c37-f6c3-4229-8b95-855cab64924c] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 835.439906] env[62627]: INFO nova.compute.manager [None req-d5474d12-367e-4429-9f89-592f2e3a2db8 tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] [instance: cbd30c37-f6c3-4229-8b95-855cab64924c] Took 1.12 seconds to destroy the instance on the hypervisor. [ 835.440183] env[62627]: DEBUG oslo.service.loopingcall [None req-d5474d12-367e-4429-9f89-592f2e3a2db8 tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 835.440405] env[62627]: DEBUG nova.compute.manager [-] [instance: cbd30c37-f6c3-4229-8b95-855cab64924c] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 835.440507] env[62627]: DEBUG nova.network.neutron [-] [instance: cbd30c37-f6c3-4229-8b95-855cab64924c] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 835.576510] env[62627]: DEBUG oslo_vmware.api [None req-0f2d6fcd-c542-44b2-a5fb-1bfd349ffe43 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Task: {'id': task-2194249, 'name': RemoveSnapshot_Task} progress is 80%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.694264] env[62627]: DEBUG nova.scheduler.client.report [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 835.740557] env[62627]: DEBUG oslo_vmware.api [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194250, 'name': ReconfigVM_Task, 'duration_secs': 0.277334} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.744192] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Reconfigured VM instance instance-00000020 to attach disk [datastore2] 241311b0-ba81-41d1-a9b6-cd40d04dbe3c/241311b0-ba81-41d1-a9b6-cd40d04dbe3c.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 835.744885] env[62627]: DEBUG oslo_vmware.api [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194251, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.745690] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-95e3a52a-ce20-4142-8861-c4e71aa0e382 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.755618] env[62627]: DEBUG oslo_vmware.api [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for the task: (returnval){ [ 835.755618] env[62627]: value = "task-2194252" [ 835.755618] env[62627]: _type = "Task" [ 835.755618] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.766527] env[62627]: DEBUG oslo_vmware.api [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194252, 'name': Rename_Task} progress is 6%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.906615] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Acquiring lock "refresh_cache-cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 835.906615] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Acquired lock "refresh_cache-cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 835.906897] env[62627]: DEBUG nova.network.neutron [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 835.914841] env[62627]: DEBUG oslo_concurrency.lockutils [req-020b5e5d-424b-4d47-8490-05d571d6dd25 req-f6c66c8b-d174-4da9-9c0c-d6b9f15cd72c service nova] Releasing lock "refresh_cache-aa181094-cb78-44e0-8d7a-1d677e0af1fe" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 835.945274] env[62627]: DEBUG nova.compute.manager [req-12f6fbd0-00ae-4a25-8049-fbf0ceb5251c req-70ed80ce-07d1-4726-8feb-507f8c5620f1 service nova] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Received event network-vif-plugged-abd8176d-4f10-4dcc-9b49-f53a48ec4cc4 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 835.945491] env[62627]: DEBUG oslo_concurrency.lockutils [req-12f6fbd0-00ae-4a25-8049-fbf0ceb5251c req-70ed80ce-07d1-4726-8feb-507f8c5620f1 service nova] Acquiring lock "cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 835.945698] env[62627]: DEBUG oslo_concurrency.lockutils [req-12f6fbd0-00ae-4a25-8049-fbf0ceb5251c req-70ed80ce-07d1-4726-8feb-507f8c5620f1 service nova] Lock "cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 835.945865] env[62627]: DEBUG oslo_concurrency.lockutils [req-12f6fbd0-00ae-4a25-8049-fbf0ceb5251c req-70ed80ce-07d1-4726-8feb-507f8c5620f1 service nova] Lock "cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 835.946040] env[62627]: DEBUG nova.compute.manager [req-12f6fbd0-00ae-4a25-8049-fbf0ceb5251c req-70ed80ce-07d1-4726-8feb-507f8c5620f1 service nova] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] No waiting events found dispatching network-vif-plugged-abd8176d-4f10-4dcc-9b49-f53a48ec4cc4 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 835.946203] env[62627]: WARNING 
nova.compute.manager [req-12f6fbd0-00ae-4a25-8049-fbf0ceb5251c req-70ed80ce-07d1-4726-8feb-507f8c5620f1 service nova] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Received unexpected event network-vif-plugged-abd8176d-4f10-4dcc-9b49-f53a48ec4cc4 for instance with vm_state building and task_state spawning. [ 835.946360] env[62627]: DEBUG nova.compute.manager [req-12f6fbd0-00ae-4a25-8049-fbf0ceb5251c req-70ed80ce-07d1-4726-8feb-507f8c5620f1 service nova] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Received event network-changed-abd8176d-4f10-4dcc-9b49-f53a48ec4cc4 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 835.946512] env[62627]: DEBUG nova.compute.manager [req-12f6fbd0-00ae-4a25-8049-fbf0ceb5251c req-70ed80ce-07d1-4726-8feb-507f8c5620f1 service nova] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Refreshing instance network info cache due to event network-changed-abd8176d-4f10-4dcc-9b49-f53a48ec4cc4. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 835.946674] env[62627]: DEBUG oslo_concurrency.lockutils [req-12f6fbd0-00ae-4a25-8049-fbf0ceb5251c req-70ed80ce-07d1-4726-8feb-507f8c5620f1 service nova] Acquiring lock "refresh_cache-cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 836.078516] env[62627]: DEBUG oslo_vmware.api [None req-0f2d6fcd-c542-44b2-a5fb-1bfd349ffe43 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Task: {'id': task-2194249, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.200302] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.079s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 836.201432] env[62627]: DEBUG nova.compute.manager [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: aed62160-2db5-4008-a491-2b27ae81ef5f] Start building networks asynchronously for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 836.203804] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d866bf92-8bfa-4630-9c60-2c8068361dd4 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.276s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 836.204058] env[62627]: DEBUG nova.objects.instance [None req-d866bf92-8bfa-4630-9c60-2c8068361dd4 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Lazy-loading 'resources' on Instance uuid 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 836.238724] env[62627]: DEBUG oslo_vmware.api [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194251, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.771473} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.238984] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] aa181094-cb78-44e0-8d7a-1d677e0af1fe/aa181094-cb78-44e0-8d7a-1d677e0af1fe.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 836.239231] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: aa181094-cb78-44e0-8d7a-1d677e0af1fe] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 836.239486] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6f6ca3f8-97e4-4c22-b4ad-aeb94c6fe7d2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.244217] env[62627]: DEBUG nova.network.neutron [-] [instance: cbd30c37-f6c3-4229-8b95-855cab64924c] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 836.247753] env[62627]: DEBUG oslo_vmware.api [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for the task: (returnval){ [ 836.247753] env[62627]: value = "task-2194253" [ 836.247753] env[62627]: _type = "Task" [ 836.247753] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.255519] env[62627]: DEBUG oslo_vmware.api [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194253, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.265180] env[62627]: DEBUG oslo_vmware.api [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194252, 'name': Rename_Task, 'duration_secs': 0.287763} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.265437] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 836.265666] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-82e9c5d6-ef06-43ee-af5f-59de34da8ac8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.271283] env[62627]: DEBUG oslo_vmware.api [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for the task: (returnval){ [ 836.271283] env[62627]: value = "task-2194254" [ 836.271283] env[62627]: _type = "Task" [ 836.271283] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.279211] env[62627]: DEBUG oslo_vmware.api [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194254, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.441797] env[62627]: DEBUG nova.network.neutron [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 836.580218] env[62627]: DEBUG oslo_vmware.api [None req-0f2d6fcd-c542-44b2-a5fb-1bfd349ffe43 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Task: {'id': task-2194249, 'name': RemoveSnapshot_Task, 'duration_secs': 1.235045} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.580218] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-0f2d6fcd-c542-44b2-a5fb-1bfd349ffe43 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Deleted Snapshot of the VM instance {{(pid=62627) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 836.580493] env[62627]: INFO nova.compute.manager [None req-0f2d6fcd-c542-44b2-a5fb-1bfd349ffe43 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Took 14.81 seconds to snapshot the instance on the hypervisor. 
[ 836.603334] env[62627]: DEBUG nova.network.neutron [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Updating instance_info_cache with network_info: [{"id": "abd8176d-4f10-4dcc-9b49-f53a48ec4cc4", "address": "fa:16:3e:74:34:18", "network": {"id": "bd876f46-2bdf-4196-b718-c36f7e15f150", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.226", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ccb0cafa1a904d578ec4998b386bebf7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae1d6a8-cbba-4bbf-af10-ba5467340475", "external-id": "nsx-vlan-transportzone-271", "segmentation_id": 271, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapabd8176d-4f", "ovs_interfaceid": "abd8176d-4f10-4dcc-9b49-f53a48ec4cc4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 836.707596] env[62627]: DEBUG nova.compute.utils [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 836.712312] env[62627]: DEBUG nova.compute.manager [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: aed62160-2db5-4008-a491-2b27ae81ef5f] Allocating IP information in the background. {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 836.712312] env[62627]: DEBUG nova.network.neutron [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: aed62160-2db5-4008-a491-2b27ae81ef5f] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 836.750538] env[62627]: INFO nova.compute.manager [-] [instance: cbd30c37-f6c3-4229-8b95-855cab64924c] Took 1.31 seconds to deallocate network for instance. [ 836.767367] env[62627]: DEBUG oslo_vmware.api [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194253, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.781702] env[62627]: DEBUG oslo_vmware.api [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194254, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.782652] env[62627]: DEBUG nova.policy [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4e513ed7742d48038a96de3832c175c2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8087909554564a15a99bb0d60937d82b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 837.105607] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Releasing lock "refresh_cache-cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 837.106044] env[62627]: DEBUG nova.compute.manager [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Instance network_info: |[{"id": "abd8176d-4f10-4dcc-9b49-f53a48ec4cc4", "address": "fa:16:3e:74:34:18", "network": {"id": "bd876f46-2bdf-4196-b718-c36f7e15f150", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.226", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ccb0cafa1a904d578ec4998b386bebf7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae1d6a8-cbba-4bbf-af10-ba5467340475", "external-id": "nsx-vlan-transportzone-271", "segmentation_id": 271, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapabd8176d-4f", "ovs_interfaceid": "abd8176d-4f10-4dcc-9b49-f53a48ec4cc4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 837.108817] env[62627]: DEBUG oslo_concurrency.lockutils [req-12f6fbd0-00ae-4a25-8049-fbf0ceb5251c req-70ed80ce-07d1-4726-8feb-507f8c5620f1 service nova] Acquired lock "refresh_cache-cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 837.109019] env[62627]: DEBUG nova.network.neutron [req-12f6fbd0-00ae-4a25-8049-fbf0ceb5251c req-70ed80ce-07d1-4726-8feb-507f8c5620f1 service nova] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Refreshing network info cache for port abd8176d-4f10-4dcc-9b49-f53a48ec4cc4 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 837.110438] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-1426df85-0b55-489d-b7c7-7339f4925e9c 
tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:74:34:18', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cae1d6a8-cbba-4bbf-af10-ba5467340475', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'abd8176d-4f10-4dcc-9b49-f53a48ec4cc4', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 837.118675] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Creating folder: Project (8087909554564a15a99bb0d60937d82b). Parent ref: group-v447541. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 837.121731] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ce821e38-6f7c-4394-bd1a-7285f54fb63d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.137179] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Created folder: Project (8087909554564a15a99bb0d60937d82b) in parent group-v447541. [ 837.137404] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Creating folder: Instances. Parent ref: group-v447633. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 837.137910] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f2aec511-fbdc-4c02-9299-c212507edb0b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.149469] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Created folder: Instances in parent group-v447633. [ 837.150051] env[62627]: DEBUG oslo.service.loopingcall [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 837.150051] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 837.150184] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4e6e740e-a41b-4c5d-88ba-ae4fce80e728 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.166322] env[62627]: DEBUG nova.network.neutron [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: aed62160-2db5-4008-a491-2b27ae81ef5f] Successfully created port: 7b046d10-1606-48ca-9f84-f34e991b5dd5 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 837.179168] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 837.179168] env[62627]: value = "task-2194257" [ 837.179168] env[62627]: _type = "Task" [ 837.179168] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.192854] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194257, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.213116] env[62627]: DEBUG nova.compute.manager [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: aed62160-2db5-4008-a491-2b27ae81ef5f] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 837.262928] env[62627]: DEBUG oslo_vmware.api [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194253, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.904949} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.263815] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d5474d12-367e-4429-9f89-592f2e3a2db8 tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 837.264728] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: aa181094-cb78-44e0-8d7a-1d677e0af1fe] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 837.265906] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5d1adbc-ef81-4b99-a8b4-e912c60aa063 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.292699] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: aa181094-cb78-44e0-8d7a-1d677e0af1fe] Reconfiguring VM instance instance-00000021 to attach disk [datastore2] aa181094-cb78-44e0-8d7a-1d677e0af1fe/aa181094-cb78-44e0-8d7a-1d677e0af1fe.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 837.299219] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c6971587-ee37-44dc-b569-2fcad9e4a810 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.316062] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dfb7f4e-9e37-4fb6-91e0-d63743ac16e1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.329215] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-989f576a-d77d-4907-9650-55cea6242c85 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.332961] env[62627]: DEBUG oslo_vmware.api [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194254, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.333391] env[62627]: DEBUG oslo_vmware.api [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for the task: (returnval){ [ 837.333391] env[62627]: value = "task-2194258" [ 837.333391] env[62627]: _type = "Task" [ 837.333391] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.369028] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d268a07-2f94-4c86-88ca-c164aa5fd0c0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.375976] env[62627]: DEBUG oslo_vmware.api [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194258, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.382504] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a43d6c0-7a03-4cc8-b95e-5b15005ff7be {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.397665] env[62627]: DEBUG nova.compute.provider_tree [None req-d866bf92-8bfa-4630-9c60-2c8068361dd4 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 837.567222] env[62627]: DEBUG nova.network.neutron [req-12f6fbd0-00ae-4a25-8049-fbf0ceb5251c req-70ed80ce-07d1-4726-8feb-507f8c5620f1 service nova] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Updated VIF entry in instance network info cache for port abd8176d-4f10-4dcc-9b49-f53a48ec4cc4. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 837.568025] env[62627]: DEBUG nova.network.neutron [req-12f6fbd0-00ae-4a25-8049-fbf0ceb5251c req-70ed80ce-07d1-4726-8feb-507f8c5620f1 service nova] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Updating instance_info_cache with network_info: [{"id": "abd8176d-4f10-4dcc-9b49-f53a48ec4cc4", "address": "fa:16:3e:74:34:18", "network": {"id": "bd876f46-2bdf-4196-b718-c36f7e15f150", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.226", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ccb0cafa1a904d578ec4998b386bebf7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae1d6a8-cbba-4bbf-af10-ba5467340475", "external-id": "nsx-vlan-transportzone-271", "segmentation_id": 271, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapabd8176d-4f", "ovs_interfaceid": "abd8176d-4f10-4dcc-9b49-f53a48ec4cc4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 837.687885] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194257, 'name': CreateVM_Task, 'duration_secs': 0.355} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.688216] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 837.688721] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 837.688879] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 837.689211] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 837.689454] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f10b5f98-8a02-4655-aec9-c0dbc8ca985e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.693861] env[62627]: DEBUG oslo_vmware.api [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Waiting for the task: (returnval){ [ 837.693861] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]520a1a85-3a4d-eee4-c4e2-70993f1ea3a3" [ 837.693861] env[62627]: _type = "Task" [ 837.693861] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.701306] env[62627]: DEBUG oslo_vmware.api [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]520a1a85-3a4d-eee4-c4e2-70993f1ea3a3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.792352] env[62627]: DEBUG oslo_vmware.api [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194254, 'name': PowerOnVM_Task, 'duration_secs': 1.257474} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.792611] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 837.792817] env[62627]: INFO nova.compute.manager [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Took 9.19 seconds to spawn the instance on the hypervisor. [ 837.792985] env[62627]: DEBUG nova.compute.manager [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 837.793776] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d319c32-fc00-41eb-98dc-326520a9a7fd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.846468] env[62627]: DEBUG oslo_vmware.api [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194258, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.901385] env[62627]: DEBUG nova.scheduler.client.report [None req-d866bf92-8bfa-4630-9c60-2c8068361dd4 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 838.070236] env[62627]: DEBUG oslo_concurrency.lockutils [req-12f6fbd0-00ae-4a25-8049-fbf0ceb5251c req-70ed80ce-07d1-4726-8feb-507f8c5620f1 service nova] Releasing lock "refresh_cache-cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 838.070510] env[62627]: DEBUG nova.compute.manager [req-12f6fbd0-00ae-4a25-8049-fbf0ceb5251c req-70ed80ce-07d1-4726-8feb-507f8c5620f1 service nova] [instance: cbd30c37-f6c3-4229-8b95-855cab64924c] Received event network-vif-deleted-3b9cf75c-dd86-44d1-9123-1680cabc589d {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 838.070691] env[62627]: INFO nova.compute.manager [req-12f6fbd0-00ae-4a25-8049-fbf0ceb5251c req-70ed80ce-07d1-4726-8feb-507f8c5620f1 service nova] [instance: cbd30c37-f6c3-4229-8b95-855cab64924c] Neutron deleted interface 3b9cf75c-dd86-44d1-9123-1680cabc589d; detaching it from the instance and deleting 
it from the info cache [ 838.070866] env[62627]: DEBUG nova.network.neutron [req-12f6fbd0-00ae-4a25-8049-fbf0ceb5251c req-70ed80ce-07d1-4726-8feb-507f8c5620f1 service nova] [instance: cbd30c37-f6c3-4229-8b95-855cab64924c] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 838.204546] env[62627]: DEBUG oslo_vmware.api [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]520a1a85-3a4d-eee4-c4e2-70993f1ea3a3, 'name': SearchDatastore_Task, 'duration_secs': 0.051342} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.204885] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 838.205138] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 838.205662] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 838.205662] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 838.205759] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 838.205939] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-112ac2c9-d1da-482d-bdd4-46354376a2cd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.217186] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 838.217367] env[62627]: DEBUG 
nova.virt.vmwareapi.vmops [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 838.218146] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-27566440-118d-493c-83d7-ab2c2931ec28 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.223216] env[62627]: DEBUG oslo_vmware.api [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Waiting for the task: (returnval){ [ 838.223216] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52b5cf8f-a48e-558a-29ca-bcced019918d" [ 838.223216] env[62627]: _type = "Task" [ 838.223216] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.227014] env[62627]: DEBUG nova.compute.manager [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: aed62160-2db5-4008-a491-2b27ae81ef5f] Start spawning the instance on the hypervisor. {{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 838.234373] env[62627]: DEBUG oslo_vmware.api [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52b5cf8f-a48e-558a-29ca-bcced019918d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.247333] env[62627]: DEBUG nova.virt.hardware [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 838.247487] env[62627]: DEBUG nova.virt.hardware [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 838.247646] env[62627]: DEBUG nova.virt.hardware [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 838.247827] env[62627]: DEBUG nova.virt.hardware [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 838.247991] env[62627]: DEBUG nova.virt.hardware [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 838.248165] env[62627]: DEBUG nova.virt.hardware [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 838.248374] env[62627]: DEBUG nova.virt.hardware [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 838.248530] env[62627]: DEBUG nova.virt.hardware [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 838.248695] env[62627]: 
DEBUG nova.virt.hardware [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 838.248855] env[62627]: DEBUG nova.virt.hardware [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 838.249031] env[62627]: DEBUG nova.virt.hardware [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 838.249814] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba329c7d-f765-45ed-8fca-8308019a74ef {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.257176] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03757e0d-7601-436e-8a9c-a986da72e6c2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.311853] env[62627]: INFO nova.compute.manager [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Took 42.92 seconds to build instance. [ 838.344214] env[62627]: DEBUG oslo_vmware.api [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194258, 'name': ReconfigVM_Task, 'duration_secs': 0.576164} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.344483] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: aa181094-cb78-44e0-8d7a-1d677e0af1fe] Reconfigured VM instance instance-00000021 to attach disk [datastore2] aa181094-cb78-44e0-8d7a-1d677e0af1fe/aa181094-cb78-44e0-8d7a-1d677e0af1fe.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 838.345110] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-35b0f54d-9361-43c4-9cce-534313fb9b69 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.352057] env[62627]: DEBUG oslo_vmware.api [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for the task: (returnval){ [ 838.352057] env[62627]: value = "task-2194259" [ 838.352057] env[62627]: _type = "Task" [ 838.352057] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.361594] env[62627]: DEBUG oslo_vmware.api [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194259, 'name': Rename_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.407515] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d866bf92-8bfa-4630-9c60-2c8068361dd4 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.204s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 838.410094] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2e38d84a-9513-4503-805a-6b92d68b7e43 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.452s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 838.410313] env[62627]: DEBUG nova.objects.instance [None req-2e38d84a-9513-4503-805a-6b92d68b7e43 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Lazy-loading 'resources' on Instance uuid 77418dff-6bdc-4e0a-954a-45c80960dbf1 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 838.438512] env[62627]: INFO nova.scheduler.client.report [None req-d866bf92-8bfa-4630-9c60-2c8068361dd4 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Deleted allocations for instance 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e [ 838.573764] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7e3f2611-83f8-45c0-a81b-ee1f15df80d2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.582936] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b26e168-897e-4d2c-aacf-60888aada3b7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.618342] env[62627]: DEBUG nova.compute.manager [req-12f6fbd0-00ae-4a25-8049-fbf0ceb5251c req-70ed80ce-07d1-4726-8feb-507f8c5620f1 service nova] [instance: cbd30c37-f6c3-4229-8b95-855cab64924c] Detach interface failed, port_id=3b9cf75c-dd86-44d1-9123-1680cabc589d, reason: Instance cbd30c37-f6c3-4229-8b95-855cab64924c could not be found. {{(pid=62627) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 838.734873] env[62627]: DEBUG oslo_vmware.api [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52b5cf8f-a48e-558a-29ca-bcced019918d, 'name': SearchDatastore_Task, 'duration_secs': 0.031128} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.735742] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-121c171f-a75c-4991-8c56-264b8c8113e1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.741622] env[62627]: DEBUG oslo_vmware.api [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Waiting for the task: (returnval){ [ 838.741622] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52010e97-4eca-9ae6-7ee8-84a61ce952d1" [ 838.741622] env[62627]: _type = "Task" [ 838.741622] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.752016] env[62627]: DEBUG oslo_vmware.api [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52010e97-4eca-9ae6-7ee8-84a61ce952d1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.794984] env[62627]: DEBUG nova.compute.manager [req-9d23eb0a-1b28-4d24-8528-5634d32ed6a6 req-e169aadd-7841-4c28-909b-e2e0ce8aa93f service nova] [instance: aed62160-2db5-4008-a491-2b27ae81ef5f] Received event network-vif-plugged-7b046d10-1606-48ca-9f84-f34e991b5dd5 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 838.795178] env[62627]: DEBUG oslo_concurrency.lockutils [req-9d23eb0a-1b28-4d24-8528-5634d32ed6a6 req-e169aadd-7841-4c28-909b-e2e0ce8aa93f service nova] Acquiring lock "aed62160-2db5-4008-a491-2b27ae81ef5f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 838.795419] env[62627]: DEBUG oslo_concurrency.lockutils [req-9d23eb0a-1b28-4d24-8528-5634d32ed6a6 req-e169aadd-7841-4c28-909b-e2e0ce8aa93f service nova] Lock "aed62160-2db5-4008-a491-2b27ae81ef5f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 838.795595] env[62627]: DEBUG oslo_concurrency.lockutils [req-9d23eb0a-1b28-4d24-8528-5634d32ed6a6 req-e169aadd-7841-4c28-909b-e2e0ce8aa93f service nova] Lock "aed62160-2db5-4008-a491-2b27ae81ef5f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 838.795832] env[62627]: DEBUG nova.compute.manager [req-9d23eb0a-1b28-4d24-8528-5634d32ed6a6 req-e169aadd-7841-4c28-909b-e2e0ce8aa93f service nova] [instance: aed62160-2db5-4008-a491-2b27ae81ef5f] No waiting events found dispatching network-vif-plugged-7b046d10-1606-48ca-9f84-f34e991b5dd5 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 838.795953] env[62627]: WARNING nova.compute.manager [req-9d23eb0a-1b28-4d24-8528-5634d32ed6a6 req-e169aadd-7841-4c28-909b-e2e0ce8aa93f service nova] [instance: aed62160-2db5-4008-a491-2b27ae81ef5f] Received unexpected event 
network-vif-plugged-7b046d10-1606-48ca-9f84-f34e991b5dd5 for instance with vm_state building and task_state spawning. [ 838.814342] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b0944d6b-31bb-49d4-b0a5-05b5dc53a97e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Lock "241311b0-ba81-41d1-a9b6-cd40d04dbe3c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 82.535s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 838.864052] env[62627]: DEBUG oslo_vmware.api [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194259, 'name': Rename_Task, 'duration_secs': 0.341696} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.864052] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: aa181094-cb78-44e0-8d7a-1d677e0af1fe] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 838.864052] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a93439ab-b384-4a4c-92be-96d54726cfbc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.870134] env[62627]: DEBUG oslo_vmware.api [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for the task: (returnval){ [ 838.870134] env[62627]: value = "task-2194260" [ 838.870134] env[62627]: _type = "Task" [ 838.870134] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.882730] env[62627]: DEBUG oslo_vmware.api [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194260, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.911793] env[62627]: DEBUG nova.network.neutron [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: aed62160-2db5-4008-a491-2b27ae81ef5f] Successfully updated port: 7b046d10-1606-48ca-9f84-f34e991b5dd5 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 838.948234] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d866bf92-8bfa-4630-9c60-2c8068361dd4 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Lock "2cfcbc3b-c2b9-4155-a48a-0e73eea0589e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.170s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 838.948234] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3f3bedf8-9882-47c1-90d9-3393199d40ad tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Lock "2cfcbc3b-c2b9-4155-a48a-0e73eea0589e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 30.900s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 838.948234] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3f3bedf8-9882-47c1-90d9-3393199d40ad tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Acquiring lock "2cfcbc3b-c2b9-4155-a48a-0e73eea0589e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 838.948234] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3f3bedf8-9882-47c1-90d9-3393199d40ad tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Lock "2cfcbc3b-c2b9-4155-a48a-0e73eea0589e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 838.948478] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3f3bedf8-9882-47c1-90d9-3393199d40ad tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Lock "2cfcbc3b-c2b9-4155-a48a-0e73eea0589e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 838.954643] env[62627]: INFO nova.compute.manager [None req-3f3bedf8-9882-47c1-90d9-3393199d40ad tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Terminating instance [ 839.256052] env[62627]: DEBUG oslo_vmware.api [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52010e97-4eca-9ae6-7ee8-84a61ce952d1, 'name': SearchDatastore_Task, 'duration_secs': 0.028116} completed 
successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.256386] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 839.256643] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a/cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 839.256911] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-64387dc0-dfc9-4ca4-aff0-0f459cb4b44a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.268021] env[62627]: DEBUG oslo_vmware.api [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Waiting for the task: (returnval){ [ 839.268021] env[62627]: value = "task-2194261" [ 839.268021] env[62627]: _type = "Task" [ 839.268021] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.276707] env[62627]: DEBUG oslo_vmware.api [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194261, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.317510] env[62627]: DEBUG nova.compute.manager [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 839.380738] env[62627]: DEBUG oslo_vmware.api [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194260, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.419290] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Acquiring lock "refresh_cache-aed62160-2db5-4008-a491-2b27ae81ef5f" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 839.419450] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Acquired lock "refresh_cache-aed62160-2db5-4008-a491-2b27ae81ef5f" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 839.419595] env[62627]: DEBUG nova.network.neutron [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: aed62160-2db5-4008-a491-2b27ae81ef5f] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 839.460660] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3f3bedf8-9882-47c1-90d9-3393199d40ad tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Acquiring lock "refresh_cache-2cfcbc3b-c2b9-4155-a48a-0e73eea0589e" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 839.460879] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3f3bedf8-9882-47c1-90d9-3393199d40ad tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Acquired lock "refresh_cache-2cfcbc3b-c2b9-4155-a48a-0e73eea0589e" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 839.461093] env[62627]: DEBUG nova.network.neutron [None req-3f3bedf8-9882-47c1-90d9-3393199d40ad tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 839.466557] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61887443-d56e-4f17-9658-331d7eacb5d2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.476777] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-328f91b0-538d-4c71-9909-2dbffa340a40 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.481980] env[62627]: DEBUG nova.compute.manager [None req-e89666a8-aabd-4570-9fb7-9db72db6fb35 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 839.482761] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d787adb0-0a47-429d-b0a6-d5c7cae865ad {{(pid=62627) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.515569] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f798a7a1-7eb9-400a-9158-61ca1fffd3bb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.527934] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bb11165-738a-4cc8-80cd-091e2b02aa1e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.547627] env[62627]: DEBUG nova.compute.provider_tree [None req-2e38d84a-9513-4503-805a-6b92d68b7e43 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 839.778818] env[62627]: DEBUG oslo_vmware.api [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194261, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.840494] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 839.883990] env[62627]: DEBUG oslo_vmware.api [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194260, 'name': PowerOnVM_Task, 'duration_secs': 0.717815} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.884382] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: aa181094-cb78-44e0-8d7a-1d677e0af1fe] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 839.884654] env[62627]: INFO nova.compute.manager [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: aa181094-cb78-44e0-8d7a-1d677e0af1fe] Took 8.54 seconds to spawn the instance on the hypervisor. 
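A pattern that recurs throughout these records is the oslo_vmware task wait loop: a vCenter task (CreateVM_Task, ReconfigVM_Task, CopyVirtualDisk_Task, PowerOnVM_Task, ...) is submitted, then polled until it completes, with progress logged on each poll and the elapsed time reported as 'duration_secs'. The following is a minimal sketch of that polling pattern, not oslo.vmware's actual implementation; the fetch_task_info callable and the poll_interval/timeout parameters are assumptions made for the example.

```python
# Minimal sketch (NOT oslo.vmware's real code) of the wait_for_task /
# _poll_task pattern seen in this log: poll a vCenter task until it reaches
# a terminal state, log progress on the way, and report elapsed seconds.
# fetch_task_info() is a hypothetical callable standing in for the real
# PropertyCollector lookup of the task's state and progress.
import time


def wait_for_task(fetch_task_info, task_id, poll_interval=0.5, timeout=300):
    """Poll a task until success/error or until the timeout expires."""
    start = time.monotonic()
    while True:
        info = fetch_task_info(task_id)  # e.g. {'state': 'running', 'progress': 66}
        if info['state'] == 'success':
            return {'id': task_id, 'duration_secs': time.monotonic() - start}
        if info['state'] == 'error':
            raise RuntimeError(f"Task {task_id} failed: {info.get('error')}")
        print(f"Task: {task_id} progress is {info.get('progress', 0)}%.")
        if time.monotonic() - start > timeout:
            raise TimeoutError(f"Task {task_id} did not complete in {timeout}s")
        time.sleep(poll_interval)
```

Under those assumptions, a call such as wait_for_task(fetch_task_info, "task-2194260") would produce output shaped like the "progress is 66%." and "duration_secs" entries above.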
[ 839.884897] env[62627]: DEBUG nova.compute.manager [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: aa181094-cb78-44e0-8d7a-1d677e0af1fe] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 839.886009] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b96796e1-a35d-48ae-b73e-605cb4545258 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.961149] env[62627]: DEBUG nova.network.neutron [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: aed62160-2db5-4008-a491-2b27ae81ef5f] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 839.963965] env[62627]: DEBUG nova.compute.utils [None req-3f3bedf8-9882-47c1-90d9-3393199d40ad tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Can not refresh info_cache because instance was not found {{(pid=62627) refresh_info_cache_for_instance /opt/stack/nova/nova/compute/utils.py:1055}} [ 839.992311] env[62627]: DEBUG nova.network.neutron [None req-3f3bedf8-9882-47c1-90d9-3393199d40ad tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 840.028753] env[62627]: INFO nova.compute.manager [None req-e89666a8-aabd-4570-9fb7-9db72db6fb35 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] instance snapshotting [ 840.031344] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac418a32-5bc3-455c-8cdc-8a75168f44b1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.050835] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44bdff9a-d102-4f00-814f-9c01b1929ccc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.054075] env[62627]: DEBUG nova.scheduler.client.report [None req-2e38d84a-9513-4503-805a-6b92d68b7e43 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 840.080958] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a2b9dec8-2925-4028-8cda-86c25f2035c6 tempest-FloatingIPsAssociationTestJSON-1435038635 
tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Acquiring lock "a674db15-ea4e-44c8-abc1-6af425286517" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 840.080958] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a2b9dec8-2925-4028-8cda-86c25f2035c6 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Lock "a674db15-ea4e-44c8-abc1-6af425286517" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 840.081180] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a2b9dec8-2925-4028-8cda-86c25f2035c6 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Acquiring lock "a674db15-ea4e-44c8-abc1-6af425286517-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 840.082085] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a2b9dec8-2925-4028-8cda-86c25f2035c6 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Lock "a674db15-ea4e-44c8-abc1-6af425286517-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 840.082085] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a2b9dec8-2925-4028-8cda-86c25f2035c6 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Lock "a674db15-ea4e-44c8-abc1-6af425286517-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 840.083859] env[62627]: INFO nova.compute.manager [None req-a2b9dec8-2925-4028-8cda-86c25f2035c6 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Terminating instance [ 840.217220] env[62627]: DEBUG nova.network.neutron [None req-3f3bedf8-9882-47c1-90d9-3393199d40ad tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 840.258122] env[62627]: DEBUG nova.network.neutron [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: aed62160-2db5-4008-a491-2b27ae81ef5f] Updating instance_info_cache with network_info: [{"id": "7b046d10-1606-48ca-9f84-f34e991b5dd5", "address": "fa:16:3e:15:ae:64", "network": {"id": "bd876f46-2bdf-4196-b718-c36f7e15f150", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": 
[{"address": "192.168.233.124", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ccb0cafa1a904d578ec4998b386bebf7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae1d6a8-cbba-4bbf-af10-ba5467340475", "external-id": "nsx-vlan-transportzone-271", "segmentation_id": 271, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7b046d10-16", "ovs_interfaceid": "7b046d10-1606-48ca-9f84-f34e991b5dd5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 840.280798] env[62627]: DEBUG oslo_vmware.api [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194261, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.584567} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.281242] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a/cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 840.281910] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 840.282026] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ce3a4a44-ed10-478c-94aa-32aa0b4b9869 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.290450] env[62627]: DEBUG oslo_vmware.api [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Waiting for the task: (returnval){ [ 840.290450] env[62627]: value = "task-2194262" [ 840.290450] env[62627]: _type = "Task" [ 840.290450] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.303562] env[62627]: DEBUG oslo_vmware.api [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194262, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.404843] env[62627]: INFO nova.compute.manager [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: aa181094-cb78-44e0-8d7a-1d677e0af1fe] Took 42.73 seconds to build instance. [ 840.558856] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2e38d84a-9513-4503-805a-6b92d68b7e43 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.149s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 840.562509] env[62627]: DEBUG oslo_concurrency.lockutils [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.346s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 840.563227] env[62627]: INFO nova.compute.claims [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] [instance: 32a3b8bb-7c3a-4e30-8a50-37ce53e71882] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 840.566443] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e89666a8-aabd-4570-9fb7-9db72db6fb35 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Creating Snapshot of the VM instance {{(pid=62627) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 840.566917] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-3a3fb603-5ffa-400c-b7de-c6ef64843884 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.574873] env[62627]: DEBUG oslo_vmware.api [None req-e89666a8-aabd-4570-9fb7-9db72db6fb35 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Waiting for the task: (returnval){ [ 840.574873] env[62627]: value = "task-2194263" [ 840.574873] env[62627]: _type = "Task" [ 840.574873] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.584039] env[62627]: DEBUG oslo_vmware.api [None req-e89666a8-aabd-4570-9fb7-9db72db6fb35 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Task: {'id': task-2194263, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.585015] env[62627]: INFO nova.scheduler.client.report [None req-2e38d84a-9513-4503-805a-6b92d68b7e43 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Deleted allocations for instance 77418dff-6bdc-4e0a-954a-45c80960dbf1 [ 840.589573] env[62627]: DEBUG nova.compute.manager [None req-a2b9dec8-2925-4028-8cda-86c25f2035c6 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Start destroying the instance on the hypervisor. {{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 840.589573] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-a2b9dec8-2925-4028-8cda-86c25f2035c6 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 840.589573] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b69f982e-b448-4696-9cee-83a22bef2ae3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.596808] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2b9dec8-2925-4028-8cda-86c25f2035c6 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 840.596939] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-751265d6-e8c7-490c-9d72-8c061ddba370 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.603154] env[62627]: DEBUG oslo_vmware.api [None req-a2b9dec8-2925-4028-8cda-86c25f2035c6 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Waiting for the task: (returnval){ [ 840.603154] env[62627]: value = "task-2194264" [ 840.603154] env[62627]: _type = "Task" [ 840.603154] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.615306] env[62627]: DEBUG oslo_vmware.api [None req-a2b9dec8-2925-4028-8cda-86c25f2035c6 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Task: {'id': task-2194264, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.719722] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3f3bedf8-9882-47c1-90d9-3393199d40ad tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Releasing lock "refresh_cache-2cfcbc3b-c2b9-4155-a48a-0e73eea0589e" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 840.720166] env[62627]: DEBUG nova.compute.manager [None req-3f3bedf8-9882-47c1-90d9-3393199d40ad tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Start destroying the instance on the hypervisor. {{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 840.720407] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-3f3bedf8-9882-47c1-90d9-3393199d40ad tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 840.720723] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-23aa728c-36a4-4a40-9a54-6a1c629c5a09 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.732102] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83217553-5083-4960-8bd0-6349f5c1e228 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.765111] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Releasing lock "refresh_cache-aed62160-2db5-4008-a491-2b27ae81ef5f" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 840.765442] env[62627]: DEBUG nova.compute.manager [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: aed62160-2db5-4008-a491-2b27ae81ef5f] Instance network_info: |[{"id": "7b046d10-1606-48ca-9f84-f34e991b5dd5", "address": "fa:16:3e:15:ae:64", "network": {"id": "bd876f46-2bdf-4196-b718-c36f7e15f150", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.124", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ccb0cafa1a904d578ec4998b386bebf7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae1d6a8-cbba-4bbf-af10-ba5467340475", "external-id": "nsx-vlan-transportzone-271", "segmentation_id": 271, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7b046d10-16", "ovs_interfaceid": "7b046d10-1606-48ca-9f84-f34e991b5dd5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 840.766228] env[62627]: WARNING nova.virt.vmwareapi.vmops [None req-3f3bedf8-9882-47c1-90d9-3393199d40ad tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e could not be found. [ 840.766572] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-3f3bedf8-9882-47c1-90d9-3393199d40ad tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 840.766625] env[62627]: INFO nova.compute.manager [None req-3f3bedf8-9882-47c1-90d9-3393199d40ad tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Took 0.05 seconds to destroy the instance on the hypervisor. [ 840.766823] env[62627]: DEBUG oslo.service.loopingcall [None req-3f3bedf8-9882-47c1-90d9-3393199d40ad tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 840.767235] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: aed62160-2db5-4008-a491-2b27ae81ef5f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:15:ae:64', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cae1d6a8-cbba-4bbf-af10-ba5467340475', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7b046d10-1606-48ca-9f84-f34e991b5dd5', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 840.774656] env[62627]: DEBUG oslo.service.loopingcall [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 840.774862] env[62627]: DEBUG nova.compute.manager [-] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 840.774960] env[62627]: DEBUG nova.network.neutron [-] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 840.776761] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aed62160-2db5-4008-a491-2b27ae81ef5f] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 840.776995] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a73bfe9d-e61b-4c85-a2cd-7dbdba1d42b0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.800246] env[62627]: DEBUG oslo_vmware.api [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194262, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064356} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.802087] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 840.802087] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 840.802087] env[62627]: value = "task-2194265" [ 840.802087] env[62627]: _type = "Task" [ 840.802087] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.802780] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57988bd5-d759-43ee-9262-d6d42b29625b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.814072] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194265, 'name': CreateVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.824650] env[62627]: DEBUG nova.network.neutron [-] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Instance cache missing network info. 
{{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 840.835990] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Reconfiguring VM instance instance-00000022 to attach disk [datastore2] cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a/cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 840.838386] env[62627]: DEBUG nova.network.neutron [-] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 840.839583] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4ba7b0b6-5085-49f4-9a37-0ac99daef317 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.860275] env[62627]: DEBUG nova.compute.manager [req-4daea85e-f884-4d28-a1f7-3a5aeed43438 req-cb983009-1477-42d2-9f90-53d74cb10f3b service nova] [instance: aed62160-2db5-4008-a491-2b27ae81ef5f] Received event network-changed-7b046d10-1606-48ca-9f84-f34e991b5dd5 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 840.860523] env[62627]: DEBUG nova.compute.manager [req-4daea85e-f884-4d28-a1f7-3a5aeed43438 req-cb983009-1477-42d2-9f90-53d74cb10f3b service nova] [instance: aed62160-2db5-4008-a491-2b27ae81ef5f] Refreshing instance network info cache due to event network-changed-7b046d10-1606-48ca-9f84-f34e991b5dd5. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 840.860847] env[62627]: DEBUG oslo_concurrency.lockutils [req-4daea85e-f884-4d28-a1f7-3a5aeed43438 req-cb983009-1477-42d2-9f90-53d74cb10f3b service nova] Acquiring lock "refresh_cache-aed62160-2db5-4008-a491-2b27ae81ef5f" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 840.861033] env[62627]: DEBUG oslo_concurrency.lockutils [req-4daea85e-f884-4d28-a1f7-3a5aeed43438 req-cb983009-1477-42d2-9f90-53d74cb10f3b service nova] Acquired lock "refresh_cache-aed62160-2db5-4008-a491-2b27ae81ef5f" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 840.861201] env[62627]: DEBUG nova.network.neutron [req-4daea85e-f884-4d28-a1f7-3a5aeed43438 req-cb983009-1477-42d2-9f90-53d74cb10f3b service nova] [instance: aed62160-2db5-4008-a491-2b27ae81ef5f] Refreshing network info cache for port 7b046d10-1606-48ca-9f84-f34e991b5dd5 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 840.864085] env[62627]: INFO nova.compute.manager [-] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Took 0.09 seconds to deallocate network for instance. 
[ 840.864475] env[62627]: INFO nova.compute.manager [None req-c6259ea2-dcab-4a0c-8998-3da60f6ee23c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: aa181094-cb78-44e0-8d7a-1d677e0af1fe] Rescuing [ 840.864699] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c6259ea2-dcab-4a0c-8998-3da60f6ee23c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Acquiring lock "refresh_cache-aa181094-cb78-44e0-8d7a-1d677e0af1fe" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 840.864904] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c6259ea2-dcab-4a0c-8998-3da60f6ee23c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Acquired lock "refresh_cache-aa181094-cb78-44e0-8d7a-1d677e0af1fe" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 840.865084] env[62627]: DEBUG nova.network.neutron [None req-c6259ea2-dcab-4a0c-8998-3da60f6ee23c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: aa181094-cb78-44e0-8d7a-1d677e0af1fe] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 840.877467] env[62627]: DEBUG oslo_vmware.api [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Waiting for the task: (returnval){ [ 840.877467] env[62627]: value = "task-2194266" [ 840.877467] env[62627]: _type = "Task" [ 840.877467] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.888709] env[62627]: DEBUG oslo_vmware.api [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194266, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.907582] env[62627]: DEBUG oslo_concurrency.lockutils [None req-25726b59-90a5-4950-a311-d646bc9578fd tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Lock "aa181094-cb78-44e0-8d7a-1d677e0af1fe" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 82.566s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 841.089120] env[62627]: DEBUG oslo_vmware.api [None req-e89666a8-aabd-4570-9fb7-9db72db6fb35 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Task: {'id': task-2194263, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.096879] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2e38d84a-9513-4503-805a-6b92d68b7e43 tempest-ServersAdminNegativeTestJSON-337070548 tempest-ServersAdminNegativeTestJSON-337070548-project-member] Lock "77418dff-6bdc-4e0a-954a-45c80960dbf1" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 38.249s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 841.116026] env[62627]: DEBUG oslo_vmware.api [None req-a2b9dec8-2925-4028-8cda-86c25f2035c6 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Task: {'id': task-2194264, 'name': PowerOffVM_Task, 'duration_secs': 0.261906} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.116414] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2b9dec8-2925-4028-8cda-86c25f2035c6 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 841.116558] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-a2b9dec8-2925-4028-8cda-86c25f2035c6 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 841.116713] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7ba350d4-a594-484b-8601-3d153ce17350 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.212394] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-a2b9dec8-2925-4028-8cda-86c25f2035c6 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 841.212622] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-a2b9dec8-2925-4028-8cda-86c25f2035c6 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 841.212802] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2b9dec8-2925-4028-8cda-86c25f2035c6 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Deleting the datastore file [datastore2] a674db15-ea4e-44c8-abc1-6af425286517 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 841.213080] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b4431914-d4b4-4884-bc95-a8687ec48a7b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.225148] env[62627]: DEBUG oslo_vmware.api [None req-a2b9dec8-2925-4028-8cda-86c25f2035c6 
tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Waiting for the task: (returnval){ [ 841.225148] env[62627]: value = "task-2194268" [ 841.225148] env[62627]: _type = "Task" [ 841.225148] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.233074] env[62627]: DEBUG oslo_vmware.api [None req-a2b9dec8-2925-4028-8cda-86c25f2035c6 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Task: {'id': task-2194268, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.315137] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194265, 'name': CreateVM_Task, 'duration_secs': 0.343037} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.315298] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aed62160-2db5-4008-a491-2b27ae81ef5f] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 841.315998] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 841.316217] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 841.316512] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 841.316755] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a80e806e-b53f-425f-a3e8-9e063b501d2c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.321218] env[62627]: DEBUG oslo_vmware.api [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Waiting for the task: (returnval){ [ 841.321218] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52c4ca84-5b26-874f-37a4-bc6783fa5c03" [ 841.321218] env[62627]: _type = "Task" [ 841.321218] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.328942] env[62627]: DEBUG oslo_vmware.api [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52c4ca84-5b26-874f-37a4-bc6783fa5c03, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.376981] env[62627]: INFO nova.compute.manager [None req-3f3bedf8-9882-47c1-90d9-3393199d40ad tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Instance disappeared during terminate [ 841.377180] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3f3bedf8-9882-47c1-90d9-3393199d40ad tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Lock "2cfcbc3b-c2b9-4155-a48a-0e73eea0589e" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 2.431s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 841.389590] env[62627]: DEBUG oslo_vmware.api [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194266, 'name': ReconfigVM_Task, 'duration_secs': 0.354607} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.389879] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Reconfigured VM instance instance-00000022 to attach disk [datastore2] cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a/cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 841.390523] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c6910f5d-804d-4605-bbd9-158b40f84155 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.398240] env[62627]: DEBUG oslo_vmware.api [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Waiting for the task: (returnval){ [ 841.398240] env[62627]: value = "task-2194269" [ 841.398240] env[62627]: _type = "Task" [ 841.398240] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.409167] env[62627]: DEBUG oslo_vmware.api [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194269, 'name': Rename_Task} progress is 5%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.410239] env[62627]: DEBUG nova.compute.manager [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 02216be1-cdb9-421a-8a23-4a919b376595] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 841.585822] env[62627]: DEBUG oslo_vmware.api [None req-e89666a8-aabd-4570-9fb7-9db72db6fb35 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Task: {'id': task-2194263, 'name': CreateSnapshot_Task, 'duration_secs': 0.591012} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.586111] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e89666a8-aabd-4570-9fb7-9db72db6fb35 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Created Snapshot of the VM instance {{(pid=62627) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 841.586848] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd6f82f0-bdd0-49ca-9380-073466402cfb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.740039] env[62627]: DEBUG oslo_vmware.api [None req-a2b9dec8-2925-4028-8cda-86c25f2035c6 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Task: {'id': task-2194268, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.179677} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.740351] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2b9dec8-2925-4028-8cda-86c25f2035c6 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 841.740567] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-a2b9dec8-2925-4028-8cda-86c25f2035c6 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 841.740776] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-a2b9dec8-2925-4028-8cda-86c25f2035c6 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 841.740976] env[62627]: INFO nova.compute.manager [None req-a2b9dec8-2925-4028-8cda-86c25f2035c6 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Took 1.15 seconds to destroy the instance on the hypervisor. 
[ 841.741286] env[62627]: DEBUG oslo.service.loopingcall [None req-a2b9dec8-2925-4028-8cda-86c25f2035c6 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 841.741521] env[62627]: DEBUG nova.compute.manager [-] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 841.741665] env[62627]: DEBUG nova.network.neutron [-] [instance: a674db15-ea4e-44c8-abc1-6af425286517] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 841.826788] env[62627]: DEBUG nova.network.neutron [req-4daea85e-f884-4d28-a1f7-3a5aeed43438 req-cb983009-1477-42d2-9f90-53d74cb10f3b service nova] [instance: aed62160-2db5-4008-a491-2b27ae81ef5f] Updated VIF entry in instance network info cache for port 7b046d10-1606-48ca-9f84-f34e991b5dd5. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 841.828830] env[62627]: DEBUG nova.network.neutron [req-4daea85e-f884-4d28-a1f7-3a5aeed43438 req-cb983009-1477-42d2-9f90-53d74cb10f3b service nova] [instance: aed62160-2db5-4008-a491-2b27ae81ef5f] Updating instance_info_cache with network_info: [{"id": "7b046d10-1606-48ca-9f84-f34e991b5dd5", "address": "fa:16:3e:15:ae:64", "network": {"id": "bd876f46-2bdf-4196-b718-c36f7e15f150", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.124", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ccb0cafa1a904d578ec4998b386bebf7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae1d6a8-cbba-4bbf-af10-ba5467340475", "external-id": "nsx-vlan-transportzone-271", "segmentation_id": 271, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7b046d10-16", "ovs_interfaceid": "7b046d10-1606-48ca-9f84-f34e991b5dd5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 841.840246] env[62627]: DEBUG oslo_vmware.api [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52c4ca84-5b26-874f-37a4-bc6783fa5c03, 'name': SearchDatastore_Task, 'duration_secs': 0.009126} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.840787] env[62627]: DEBUG nova.network.neutron [None req-c6259ea2-dcab-4a0c-8998-3da60f6ee23c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: aa181094-cb78-44e0-8d7a-1d677e0af1fe] Updating instance_info_cache with network_info: [{"id": "4eb9bc28-5e4b-4b2e-859d-31b930210e13", "address": "fa:16:3e:fc:b7:08", "network": {"id": "0b8ce5a6-9abc-4473-b558-a4e9e5c7d741", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1874234589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d41b3fa69e2473fbea79b0ec52108fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc9714ff-7109-4ea1-9435-b2b3fbdb9e81", "external-id": "nsx-vlan-transportzone-887", "segmentation_id": 887, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4eb9bc28-5e", "ovs_interfaceid": "4eb9bc28-5e4b-4b2e-859d-31b930210e13", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 841.846423] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 841.846646] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: aed62160-2db5-4008-a491-2b27ae81ef5f] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 841.846872] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 841.847028] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 841.847204] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 
tempest-ListImageFiltersTestJSON-306267884-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 841.848367] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9cda1ea1-320a-4f78-b3f2-e60f047c1a2e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.858371] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 841.858566] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 841.859307] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5fed9eac-4063-4c1b-a366-0e98115dc2a0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.867113] env[62627]: DEBUG oslo_vmware.api [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Waiting for the task: (returnval){ [ 841.867113] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]521212f1-a558-de7b-a6ae-e598aefb63ea" [ 841.867113] env[62627]: _type = "Task" [ 841.867113] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.880762] env[62627]: DEBUG oslo_vmware.api [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]521212f1-a558-de7b-a6ae-e598aefb63ea, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.911567] env[62627]: DEBUG oslo_vmware.api [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194269, 'name': Rename_Task, 'duration_secs': 0.155994} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.911910] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 841.912089] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a382d3ce-d699-4cd7-afbd-d5da72669e1c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.921384] env[62627]: DEBUG oslo_vmware.api [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Waiting for the task: (returnval){ [ 841.921384] env[62627]: value = "task-2194270" [ 841.921384] env[62627]: _type = "Task" [ 841.921384] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.930722] env[62627]: DEBUG oslo_vmware.api [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194270, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.937027] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 842.108167] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e89666a8-aabd-4570-9fb7-9db72db6fb35 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Creating linked-clone VM from snapshot {{(pid=62627) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 842.110040] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-d0881bce-df88-44ed-a8ef-5740e95a0101 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.117514] env[62627]: DEBUG oslo_vmware.api [None req-e89666a8-aabd-4570-9fb7-9db72db6fb35 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Waiting for the task: (returnval){ [ 842.117514] env[62627]: value = "task-2194271" [ 842.117514] env[62627]: _type = "Task" [ 842.117514] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.128684] env[62627]: DEBUG oslo_vmware.api [None req-e89666a8-aabd-4570-9fb7-9db72db6fb35 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Task: {'id': task-2194271, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.176099] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07c389ac-a1a7-4292-afb7-c98835336947 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.182314] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f49237b-a009-469c-aeb9-0021c43571bd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.218479] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d0c692a-6b2c-4c8c-a92b-c7d983c9a3a5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.226780] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-893156cb-3e25-4d93-8509-42e367e3cf9d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.242259] env[62627]: DEBUG nova.compute.provider_tree [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 842.329925] env[62627]: DEBUG oslo_concurrency.lockutils [req-4daea85e-f884-4d28-a1f7-3a5aeed43438 req-cb983009-1477-42d2-9f90-53d74cb10f3b service nova] Releasing lock "refresh_cache-aed62160-2db5-4008-a491-2b27ae81ef5f" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 842.349199] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c6259ea2-dcab-4a0c-8998-3da60f6ee23c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Releasing lock "refresh_cache-aa181094-cb78-44e0-8d7a-1d677e0af1fe" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 842.390809] env[62627]: DEBUG oslo_vmware.api [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]521212f1-a558-de7b-a6ae-e598aefb63ea, 'name': SearchDatastore_Task, 'duration_secs': 0.012575} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.392456] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f4044743-2111-4e8d-9aac-4748a319e2e5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.400267] env[62627]: DEBUG oslo_vmware.api [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Waiting for the task: (returnval){ [ 842.400267] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52cb59db-6ab6-7b5e-87ad-20556542daf8" [ 842.400267] env[62627]: _type = "Task" [ 842.400267] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.409378] env[62627]: DEBUG oslo_vmware.api [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52cb59db-6ab6-7b5e-87ad-20556542daf8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.432248] env[62627]: DEBUG oslo_vmware.api [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194270, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.634519] env[62627]: DEBUG oslo_vmware.api [None req-e89666a8-aabd-4570-9fb7-9db72db6fb35 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Task: {'id': task-2194271, 'name': CloneVM_Task} progress is 94%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.745942] env[62627]: DEBUG nova.scheduler.client.report [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 842.806976] env[62627]: DEBUG nova.network.neutron [-] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 842.912328] env[62627]: DEBUG oslo_vmware.api [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52cb59db-6ab6-7b5e-87ad-20556542daf8, 'name': SearchDatastore_Task, 'duration_secs': 0.041906} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.912649] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 842.912961] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] aed62160-2db5-4008-a491-2b27ae81ef5f/aed62160-2db5-4008-a491-2b27ae81ef5f.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 842.913300] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b0cea265-6881-4dda-8d12-86d1163e7fc0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.921019] env[62627]: DEBUG oslo_vmware.api [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Waiting for the task: (returnval){ [ 842.921019] env[62627]: value = "task-2194272" [ 842.921019] env[62627]: _type = "Task" [ 842.921019] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.926381] env[62627]: DEBUG nova.compute.manager [req-c8cecb98-80d8-4762-85e8-a77732e17a49 req-fb9cd120-6a11-4e31-af53-06fa20b8e61d service nova] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Received event network-vif-deleted-246fd9bd-81de-4162-b744-58c17169a80f {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 842.935326] env[62627]: DEBUG oslo_vmware.api [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194272, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.938649] env[62627]: DEBUG oslo_vmware.api [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194270, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.128878] env[62627]: DEBUG oslo_vmware.api [None req-e89666a8-aabd-4570-9fb7-9db72db6fb35 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Task: {'id': task-2194271, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.251389] env[62627]: DEBUG oslo_concurrency.lockutils [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.689s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 843.252082] env[62627]: DEBUG nova.compute.manager [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] [instance: 32a3b8bb-7c3a-4e30-8a50-37ce53e71882] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 843.256948] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 32.245s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 843.259070] env[62627]: DEBUG nova.objects.instance [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] [instance: 16c4065b-83cf-4b1c-92bf-66ce0a75e573] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62627) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 843.312580] env[62627]: INFO nova.compute.manager [-] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Took 1.57 seconds to deallocate network for instance. [ 843.434312] env[62627]: DEBUG oslo_vmware.api [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194272, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.437517] env[62627]: DEBUG oslo_vmware.api [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194270, 'name': PowerOnVM_Task} progress is 94%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.631837] env[62627]: DEBUG oslo_vmware.api [None req-e89666a8-aabd-4570-9fb7-9db72db6fb35 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Task: {'id': task-2194271, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.764017] env[62627]: DEBUG nova.compute.utils [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 843.765475] env[62627]: DEBUG nova.compute.manager [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] [instance: 32a3b8bb-7c3a-4e30-8a50-37ce53e71882] Allocating IP information in the background. {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 843.765645] env[62627]: DEBUG nova.network.neutron [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] [instance: 32a3b8bb-7c3a-4e30-8a50-37ce53e71882] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 843.817742] env[62627]: DEBUG nova.policy [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9ad9d0552ff44c91a4aa5f8cf44b981d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '73aa22242f52456f90a63516c2259102', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 843.819916] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a2b9dec8-2925-4028-8cda-86c25f2035c6 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 843.891625] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6259ea2-dcab-4a0c-8998-3da60f6ee23c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: aa181094-cb78-44e0-8d7a-1d677e0af1fe] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 843.891625] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c6995bce-18d4-4cfd-b5f3-a0a9c19ce981 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.899615] env[62627]: DEBUG oslo_vmware.api [None req-c6259ea2-dcab-4a0c-8998-3da60f6ee23c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for the task: (returnval){ [ 843.899615] env[62627]: value = "task-2194273" [ 843.899615] env[62627]: _type = "Task" [ 843.899615] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.909259] env[62627]: DEBUG oslo_vmware.api [None req-c6259ea2-dcab-4a0c-8998-3da60f6ee23c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194273, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.948217] env[62627]: DEBUG oslo_vmware.api [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194272, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.535016} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.948493] env[62627]: DEBUG oslo_vmware.api [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194270, 'name': PowerOnVM_Task} progress is 94%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.948716] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] aed62160-2db5-4008-a491-2b27ae81ef5f/aed62160-2db5-4008-a491-2b27ae81ef5f.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 843.948920] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: aed62160-2db5-4008-a491-2b27ae81ef5f] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 843.949390] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-88575719-5689-4ae0-a3d2-cccda2d51bc1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.956162] env[62627]: DEBUG oslo_vmware.api [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Waiting for the task: (returnval){ [ 843.956162] env[62627]: value = "task-2194274" [ 843.956162] env[62627]: _type = "Task" [ 843.956162] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.965106] env[62627]: DEBUG oslo_vmware.api [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194274, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.130959] env[62627]: DEBUG oslo_vmware.api [None req-e89666a8-aabd-4570-9fb7-9db72db6fb35 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Task: {'id': task-2194271, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.259831] env[62627]: DEBUG nova.network.neutron [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] [instance: 32a3b8bb-7c3a-4e30-8a50-37ce53e71882] Successfully created port: 499d7862-fc13-4d2b-8c15-6eab541a13f1 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 844.271700] env[62627]: DEBUG nova.compute.manager [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] [instance: 32a3b8bb-7c3a-4e30-8a50-37ce53e71882] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 844.275931] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ea9c11a1-db48-4573-bd7a-021dd30128d9 tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.018s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 844.276603] env[62627]: DEBUG oslo_concurrency.lockutils [None req-03afe6a7-56ac-4412-a977-1ae075768e01 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.416s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 844.276603] env[62627]: DEBUG nova.objects.instance [None req-03afe6a7-56ac-4412-a977-1ae075768e01 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Lazy-loading 'resources' on Instance uuid fffa23fe-3ce2-4879-b72d-11275d298504 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 844.409820] env[62627]: DEBUG oslo_vmware.api [None req-c6259ea2-dcab-4a0c-8998-3da60f6ee23c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194273, 'name': PowerOffVM_Task, 'duration_secs': 0.472631} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.410107] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6259ea2-dcab-4a0c-8998-3da60f6ee23c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: aa181094-cb78-44e0-8d7a-1d677e0af1fe] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 844.410880] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5044e275-9b8d-4fe0-887b-5d0300bfffe4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.439924] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73130451-5d7f-4ac8-99bf-4aa310aa164c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.448575] env[62627]: DEBUG oslo_vmware.api [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194270, 'name': PowerOnVM_Task} progress is 94%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.467216] env[62627]: DEBUG oslo_vmware.api [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194274, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.484066] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6259ea2-dcab-4a0c-8998-3da60f6ee23c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: aa181094-cb78-44e0-8d7a-1d677e0af1fe] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 844.484401] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ab01da44-56f0-4e13-a7b4-ce57faeaaff0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.491512] env[62627]: DEBUG oslo_vmware.api [None req-c6259ea2-dcab-4a0c-8998-3da60f6ee23c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for the task: (returnval){ [ 844.491512] env[62627]: value = "task-2194275" [ 844.491512] env[62627]: _type = "Task" [ 844.491512] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.500210] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6259ea2-dcab-4a0c-8998-3da60f6ee23c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: aa181094-cb78-44e0-8d7a-1d677e0af1fe] VM already powered off {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 844.500429] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c6259ea2-dcab-4a0c-8998-3da60f6ee23c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: aa181094-cb78-44e0-8d7a-1d677e0af1fe] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 844.500649] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c6259ea2-dcab-4a0c-8998-3da60f6ee23c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 844.500781] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c6259ea2-dcab-4a0c-8998-3da60f6ee23c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 844.500960] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6259ea2-dcab-4a0c-8998-3da60f6ee23c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 844.501460] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-678ce35e-5ef7-4da4-b167-97f45b49c78e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.513158] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6259ea2-dcab-4a0c-8998-3da60f6ee23c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 844.513355] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c6259ea2-dcab-4a0c-8998-3da60f6ee23c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 844.514291] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99152d5b-eac6-4260-b69e-c29a98427ce0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.522286] env[62627]: DEBUG oslo_vmware.api [None req-c6259ea2-dcab-4a0c-8998-3da60f6ee23c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for the task: (returnval){ [ 844.522286] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]528cf888-78f7-eb13-642b-6e019dca728f" [ 844.522286] env[62627]: _type = "Task" [ 844.522286] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.527818] env[62627]: DEBUG oslo_vmware.api [None req-c6259ea2-dcab-4a0c-8998-3da60f6ee23c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]528cf888-78f7-eb13-642b-6e019dca728f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.629754] env[62627]: DEBUG oslo_vmware.api [None req-e89666a8-aabd-4570-9fb7-9db72db6fb35 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Task: {'id': task-2194271, 'name': CloneVM_Task} progress is 94%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.947945] env[62627]: DEBUG oslo_vmware.api [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194270, 'name': PowerOnVM_Task, 'duration_secs': 2.918612} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.948262] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 844.948611] env[62627]: INFO nova.compute.manager [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Took 10.80 seconds to spawn the instance on the hypervisor. 
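Editor's note: the paired "Waiting for the task: (returnval){...}" and "Task: {...} progress is N%" entries above are produced by oslo.vmware's request/poll loop. A minimal sketch of that pattern follows; it is not taken from this log, and the host, credentials, poll interval and vm_ref are placeholders.

    from oslo_vmware import api

    def power_on(session, vm_ref):
        # invoke_api() issues the SOAP call ("Invoking VirtualMachine.PowerOnVM_Task ...")
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # wait_for_task() polls the task, emitting the "progress is N%" DEBUG lines
        # each poll interval until the task succeeds or raises on error
        return session.wait_for_task(task)

    # hypothetical session setup; task_poll_interval controls the polling cadence
    session = api.VMwareAPISession('vcenter.example.test', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)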
[ 844.948611] env[62627]: DEBUG nova.compute.manager [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 844.949567] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1da32768-42f7-4b76-b68b-0ace97b1661f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.974906] env[62627]: DEBUG oslo_vmware.api [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194274, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.731056} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.974906] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: aed62160-2db5-4008-a491-2b27ae81ef5f] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 844.975879] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dd2690b-9226-464b-8786-3ed184f5e121 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.001349] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: aed62160-2db5-4008-a491-2b27ae81ef5f] Reconfiguring VM instance instance-00000023 to attach disk [datastore2] aed62160-2db5-4008-a491-2b27ae81ef5f/aed62160-2db5-4008-a491-2b27ae81ef5f.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 845.004351] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2f4bebdb-31ce-4c8c-bb35-b381e513fb08 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.025699] env[62627]: DEBUG oslo_vmware.api [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Waiting for the task: (returnval){ [ 845.025699] env[62627]: value = "task-2194276" [ 845.025699] env[62627]: _type = "Task" [ 845.025699] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.034451] env[62627]: DEBUG oslo_vmware.api [None req-c6259ea2-dcab-4a0c-8998-3da60f6ee23c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]528cf888-78f7-eb13-642b-6e019dca728f, 'name': SearchDatastore_Task, 'duration_secs': 0.029871} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.038751] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25139c3d-bfbe-4f86-ba6f-e92c6219b2aa {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.043864] env[62627]: DEBUG oslo_vmware.api [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194276, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.046749] env[62627]: DEBUG oslo_vmware.api [None req-c6259ea2-dcab-4a0c-8998-3da60f6ee23c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for the task: (returnval){ [ 845.046749] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52c643ff-8c62-82a7-817b-a3d16f897be8" [ 845.046749] env[62627]: _type = "Task" [ 845.046749] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.059497] env[62627]: DEBUG oslo_vmware.api [None req-c6259ea2-dcab-4a0c-8998-3da60f6ee23c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52c643ff-8c62-82a7-817b-a3d16f897be8, 'name': SearchDatastore_Task, 'duration_secs': 0.008903} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.059761] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c6259ea2-dcab-4a0c-8998-3da60f6ee23c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 845.060033] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6259ea2-dcab-4a0c-8998-3da60f6ee23c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] aa181094-cb78-44e0-8d7a-1d677e0af1fe/d5497cf1-16e8-478a-a834-e9289b1751b7-rescue.vmdk. {{(pid=62627) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 845.060296] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-995c23bd-a1a3-4e8e-a96d-e1380e946178 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.066553] env[62627]: DEBUG oslo_vmware.api [None req-c6259ea2-dcab-4a0c-8998-3da60f6ee23c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for the task: (returnval){ [ 845.066553] env[62627]: value = "task-2194277" [ 845.066553] env[62627]: _type = "Task" [ 845.066553] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.082950] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Acquiring lock "7804914c-7df8-45c2-a934-67aa73afb2e2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 845.082950] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Lock "7804914c-7df8-45c2-a934-67aa73afb2e2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 845.082950] env[62627]: DEBUG oslo_vmware.api [None req-c6259ea2-dcab-4a0c-8998-3da60f6ee23c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194277, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.134256] env[62627]: DEBUG oslo_vmware.api [None req-e89666a8-aabd-4570-9fb7-9db72db6fb35 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Task: {'id': task-2194271, 'name': CloneVM_Task} progress is 94%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.286987] env[62627]: DEBUG nova.compute.manager [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] [instance: 32a3b8bb-7c3a-4e30-8a50-37ce53e71882] Start spawning the instance on the hypervisor. 
{{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 845.312351] env[62627]: DEBUG nova.virt.hardware [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 845.312646] env[62627]: DEBUG nova.virt.hardware [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 845.312804] env[62627]: DEBUG nova.virt.hardware [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 845.312989] env[62627]: DEBUG nova.virt.hardware [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 845.313153] env[62627]: DEBUG nova.virt.hardware [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 845.313304] env[62627]: DEBUG nova.virt.hardware [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 845.313520] env[62627]: DEBUG nova.virt.hardware [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 845.313680] env[62627]: DEBUG nova.virt.hardware [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 845.313947] env[62627]: DEBUG nova.virt.hardware [None 
req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 845.314139] env[62627]: DEBUG nova.virt.hardware [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 845.314318] env[62627]: DEBUG nova.virt.hardware [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 845.315251] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57de40f1-2c7b-44f8-942c-e990ec34577e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.330108] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa0f3d17-f093-481e-9437-2f41680a09f3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.404171] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83c79e14-9db2-4e82-9d7e-bd6640584c4c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.414037] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aedae832-85e5-419e-ba45-53f4d01d8f70 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.451776] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24f7fe49-4fcd-4495-aaf4-3ddfbcde994c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.461738] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3e421b3-4d9e-4adb-a8de-892ffe3e450c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.487927] env[62627]: DEBUG nova.compute.provider_tree [None req-03afe6a7-56ac-4412-a977-1ae075768e01 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 845.491025] env[62627]: INFO nova.compute.manager [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Took 45.28 seconds to build instance. 
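Editor's note: the recurring 'Acquiring lock "compute_resources" ... :: waited ...s' and '"released" ... :: held ...s' DEBUG lines in this section come from oslo.concurrency's lockutils wrapper (the `inner` function cited in the trailers). A minimal sketch of the two usual forms is below; the lock names and function are illustrative, not copied from Nova.

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def claim_resources(instance):
        # runs only while the named semaphore is held; lockutils logs the
        # acquire / waited / held timings seen in this log
        ...

    # equivalent context-manager form, e.g. around a cache refresh
    with lockutils.lock('refresh_cache-<instance-uuid>'):
        ...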
[ 845.493371] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f74ab9a9-2e7b-4cbe-9e9f-bdbc2edd0063 tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Acquiring lock "e5712962-ad8e-4cf2-aa47-275293609a13" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 845.493612] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f74ab9a9-2e7b-4cbe-9e9f-bdbc2edd0063 tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Lock "e5712962-ad8e-4cf2-aa47-275293609a13" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 845.493834] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f74ab9a9-2e7b-4cbe-9e9f-bdbc2edd0063 tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Acquiring lock "e5712962-ad8e-4cf2-aa47-275293609a13-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 845.494598] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f74ab9a9-2e7b-4cbe-9e9f-bdbc2edd0063 tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Lock "e5712962-ad8e-4cf2-aa47-275293609a13-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 845.494913] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f74ab9a9-2e7b-4cbe-9e9f-bdbc2edd0063 tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Lock "e5712962-ad8e-4cf2-aa47-275293609a13-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 845.499300] env[62627]: INFO nova.compute.manager [None req-f74ab9a9-2e7b-4cbe-9e9f-bdbc2edd0063 tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] Terminating instance [ 845.537184] env[62627]: DEBUG oslo_vmware.api [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194276, 'name': ReconfigVM_Task, 'duration_secs': 0.478437} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.538015] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: aed62160-2db5-4008-a491-2b27ae81ef5f] Reconfigured VM instance instance-00000023 to attach disk [datastore2] aed62160-2db5-4008-a491-2b27ae81ef5f/aed62160-2db5-4008-a491-2b27ae81ef5f.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 845.538137] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-15da4a67-a472-49af-b23a-9de11246f2f9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.546484] env[62627]: DEBUG oslo_vmware.api [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Waiting for the task: (returnval){ [ 845.546484] env[62627]: value = "task-2194278" [ 845.546484] env[62627]: _type = "Task" [ 845.546484] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.557663] env[62627]: DEBUG oslo_vmware.api [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194278, 'name': Rename_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.577996] env[62627]: DEBUG oslo_vmware.api [None req-c6259ea2-dcab-4a0c-8998-3da60f6ee23c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194277, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.631819] env[62627]: DEBUG oslo_vmware.api [None req-e89666a8-aabd-4570-9fb7-9db72db6fb35 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Task: {'id': task-2194271, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.773639] env[62627]: DEBUG nova.compute.manager [req-d43c6a3d-0802-46a7-b5d1-b4f94a9f727b req-c3fd6f03-84fe-48aa-b2b6-22208918e0d1 service nova] [instance: 32a3b8bb-7c3a-4e30-8a50-37ce53e71882] Received event network-vif-plugged-499d7862-fc13-4d2b-8c15-6eab541a13f1 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 845.773918] env[62627]: DEBUG oslo_concurrency.lockutils [req-d43c6a3d-0802-46a7-b5d1-b4f94a9f727b req-c3fd6f03-84fe-48aa-b2b6-22208918e0d1 service nova] Acquiring lock "32a3b8bb-7c3a-4e30-8a50-37ce53e71882-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 845.774280] env[62627]: DEBUG oslo_concurrency.lockutils [req-d43c6a3d-0802-46a7-b5d1-b4f94a9f727b req-c3fd6f03-84fe-48aa-b2b6-22208918e0d1 service nova] Lock "32a3b8bb-7c3a-4e30-8a50-37ce53e71882-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 845.774449] env[62627]: DEBUG oslo_concurrency.lockutils [req-d43c6a3d-0802-46a7-b5d1-b4f94a9f727b req-c3fd6f03-84fe-48aa-b2b6-22208918e0d1 service nova] Lock "32a3b8bb-7c3a-4e30-8a50-37ce53e71882-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 845.774609] env[62627]: DEBUG nova.compute.manager [req-d43c6a3d-0802-46a7-b5d1-b4f94a9f727b req-c3fd6f03-84fe-48aa-b2b6-22208918e0d1 service nova] [instance: 32a3b8bb-7c3a-4e30-8a50-37ce53e71882] No waiting events found dispatching network-vif-plugged-499d7862-fc13-4d2b-8c15-6eab541a13f1 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 845.774779] env[62627]: WARNING nova.compute.manager [req-d43c6a3d-0802-46a7-b5d1-b4f94a9f727b req-c3fd6f03-84fe-48aa-b2b6-22208918e0d1 service nova] [instance: 32a3b8bb-7c3a-4e30-8a50-37ce53e71882] Received unexpected event network-vif-plugged-499d7862-fc13-4d2b-8c15-6eab541a13f1 for instance with vm_state building and task_state spawning. 
[ 845.883901] env[62627]: DEBUG nova.network.neutron [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] [instance: 32a3b8bb-7c3a-4e30-8a50-37ce53e71882] Successfully updated port: 499d7862-fc13-4d2b-8c15-6eab541a13f1 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 845.999610] env[62627]: DEBUG nova.scheduler.client.report [None req-03afe6a7-56ac-4412-a977-1ae075768e01 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 846.004209] env[62627]: DEBUG nova.compute.manager [None req-f74ab9a9-2e7b-4cbe-9e9f-bdbc2edd0063 tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] Start destroying the instance on the hypervisor. {{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 846.004478] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-f74ab9a9-2e7b-4cbe-9e9f-bdbc2edd0063 tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 846.004682] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8d83ab35-b271-46c5-9916-c7c31d8c9591 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.008195] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1426df85-0b55-489d-b7c7-7339f4925e9c tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Lock "cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 86.003s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 846.013897] env[62627]: DEBUG oslo_vmware.api [None req-f74ab9a9-2e7b-4cbe-9e9f-bdbc2edd0063 tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Waiting for the task: (returnval){ [ 846.013897] env[62627]: value = "task-2194279" [ 846.013897] env[62627]: _type = "Task" [ 846.013897] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.024204] env[62627]: DEBUG oslo_vmware.api [None req-f74ab9a9-2e7b-4cbe-9e9f-bdbc2edd0063 tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Task: {'id': task-2194279, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.055992] env[62627]: DEBUG oslo_vmware.api [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194278, 'name': Rename_Task, 'duration_secs': 0.171325} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.056289] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: aed62160-2db5-4008-a491-2b27ae81ef5f] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 846.056530] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-797bd0df-6fd2-47cc-8d8d-cd6393a4a902 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.066088] env[62627]: DEBUG oslo_vmware.api [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Waiting for the task: (returnval){ [ 846.066088] env[62627]: value = "task-2194280" [ 846.066088] env[62627]: _type = "Task" [ 846.066088] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.078569] env[62627]: DEBUG oslo_vmware.api [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194280, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.083255] env[62627]: DEBUG oslo_vmware.api [None req-c6259ea2-dcab-4a0c-8998-3da60f6ee23c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194277, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.524933} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.083536] env[62627]: INFO nova.virt.vmwareapi.ds_util [None req-c6259ea2-dcab-4a0c-8998-3da60f6ee23c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] aa181094-cb78-44e0-8d7a-1d677e0af1fe/d5497cf1-16e8-478a-a834-e9289b1751b7-rescue.vmdk. 
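Editor's note: the CopyVirtualDisk_Task and ExtendVirtualDisk_Task invocations above go through vCenter's VirtualDiskManager. A rough sketch of how such a call is issued and awaited with oslo.vmware follows; the datastore paths and datacenter reference are placeholders, not values from this log.

    def copy_vmdk(session, dc_ref, source_path, dest_path):
        # managed-object reference to the vCenter VirtualDiskManager
        disk_mgr = session.vim.service_content.virtualDiskManager
        # starts the copy task ("Invoking VirtualDiskManager.CopyVirtualDisk_Task ...")
        task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr,
                                  sourceName=source_path, sourceDatacenter=dc_ref,
                                  destName=dest_path, destDatacenter=dc_ref)
        # blocks, polling until the task reports completion
        return session.wait_for_task(task)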
[ 846.084813] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8f73a43-a860-4d06-9b15-c986ee635312 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.109170] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6259ea2-dcab-4a0c-8998-3da60f6ee23c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: aa181094-cb78-44e0-8d7a-1d677e0af1fe] Reconfiguring VM instance instance-00000021 to attach disk [datastore2] aa181094-cb78-44e0-8d7a-1d677e0af1fe/d5497cf1-16e8-478a-a834-e9289b1751b7-rescue.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 846.109478] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3271a847-3d76-4027-85e7-33cf56da2497 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.127923] env[62627]: DEBUG oslo_vmware.api [None req-c6259ea2-dcab-4a0c-8998-3da60f6ee23c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for the task: (returnval){ [ 846.127923] env[62627]: value = "task-2194281" [ 846.127923] env[62627]: _type = "Task" [ 846.127923] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.134535] env[62627]: DEBUG oslo_vmware.api [None req-e89666a8-aabd-4570-9fb7-9db72db6fb35 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Task: {'id': task-2194271, 'name': CloneVM_Task} progress is 94%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.140330] env[62627]: DEBUG oslo_vmware.api [None req-c6259ea2-dcab-4a0c-8998-3da60f6ee23c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194281, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.388233] env[62627]: DEBUG oslo_concurrency.lockutils [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Acquiring lock "refresh_cache-32a3b8bb-7c3a-4e30-8a50-37ce53e71882" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 846.388233] env[62627]: DEBUG oslo_concurrency.lockutils [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Acquired lock "refresh_cache-32a3b8bb-7c3a-4e30-8a50-37ce53e71882" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 846.388393] env[62627]: DEBUG nova.network.neutron [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] [instance: 32a3b8bb-7c3a-4e30-8a50-37ce53e71882] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 846.504905] env[62627]: DEBUG oslo_concurrency.lockutils [None req-03afe6a7-56ac-4412-a977-1ae075768e01 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.229s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 846.507837] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a8b199b3-718d-4962-b925-ffcb4ce76c7b tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.167s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 846.508081] env[62627]: DEBUG nova.objects.instance [None req-a8b199b3-718d-4962-b925-ffcb4ce76c7b tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Lazy-loading 'resources' on Instance uuid 83998771-97a7-4458-834e-df1e4b2914ee {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 846.510804] env[62627]: DEBUG nova.compute.manager [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: f1b70379-e13d-4007-ac2d-3d2d43cd5602] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 846.525551] env[62627]: DEBUG oslo_vmware.api [None req-f74ab9a9-2e7b-4cbe-9e9f-bdbc2edd0063 tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Task: {'id': task-2194279, 'name': PowerOffVM_Task, 'duration_secs': 0.222138} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.525551] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-f74ab9a9-2e7b-4cbe-9e9f-bdbc2edd0063 tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 846.525551] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-f74ab9a9-2e7b-4cbe-9e9f-bdbc2edd0063 tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] Volume detach. Driver type: vmdk {{(pid=62627) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 846.525715] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-f74ab9a9-2e7b-4cbe-9e9f-bdbc2edd0063 tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447551', 'volume_id': '51233852-f0e4-4bab-903c-b0575335358d', 'name': 'volume-51233852-f0e4-4bab-903c-b0575335358d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e5712962-ad8e-4cf2-aa47-275293609a13', 'attached_at': '', 'detached_at': '', 'volume_id': '51233852-f0e4-4bab-903c-b0575335358d', 'serial': '51233852-f0e4-4bab-903c-b0575335358d'} {{(pid=62627) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 846.526619] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7462a9f4-72a0-42be-aa15-722bb3538617 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.532203] env[62627]: INFO nova.scheduler.client.report [None req-03afe6a7-56ac-4412-a977-1ae075768e01 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Deleted allocations for instance fffa23fe-3ce2-4879-b72d-11275d298504 [ 846.547244] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94462386-3565-46a4-8335-c846517ae231 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.556720] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-171ec782-4959-472b-aa66-b067feb56aa4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.578020] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f45c0db3-924a-4e84-b1fd-caee48a216fb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.587277] env[62627]: DEBUG oslo_vmware.api [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194280, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.597814] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-f74ab9a9-2e7b-4cbe-9e9f-bdbc2edd0063 tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] The volume has not been displaced from its original location: [datastore2] volume-51233852-f0e4-4bab-903c-b0575335358d/volume-51233852-f0e4-4bab-903c-b0575335358d.vmdk. No consolidation needed. {{(pid=62627) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 846.604075] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-f74ab9a9-2e7b-4cbe-9e9f-bdbc2edd0063 tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] Reconfiguring VM instance instance-00000011 to detach disk 2000 {{(pid=62627) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 846.605051] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-64fce4ef-48c6-4197-be5f-76b99c854513 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.624019] env[62627]: DEBUG oslo_vmware.api [None req-f74ab9a9-2e7b-4cbe-9e9f-bdbc2edd0063 tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Waiting for the task: (returnval){ [ 846.624019] env[62627]: value = "task-2194282" [ 846.624019] env[62627]: _type = "Task" [ 846.624019] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.637051] env[62627]: DEBUG oslo_vmware.api [None req-e89666a8-aabd-4570-9fb7-9db72db6fb35 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Task: {'id': task-2194271, 'name': CloneVM_Task, 'duration_secs': 4.460401} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.640506] env[62627]: INFO nova.virt.vmwareapi.vmops [None req-e89666a8-aabd-4570-9fb7-9db72db6fb35 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Created linked-clone VM from snapshot [ 846.640776] env[62627]: DEBUG oslo_vmware.api [None req-f74ab9a9-2e7b-4cbe-9e9f-bdbc2edd0063 tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Task: {'id': task-2194282, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.641506] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d000172-d6f0-4a32-922e-0093dba4eacb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.646832] env[62627]: DEBUG oslo_vmware.api [None req-c6259ea2-dcab-4a0c-8998-3da60f6ee23c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194281, 'name': ReconfigVM_Task, 'duration_secs': 0.326278} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.648172] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6259ea2-dcab-4a0c-8998-3da60f6ee23c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: aa181094-cb78-44e0-8d7a-1d677e0af1fe] Reconfigured VM instance instance-00000021 to attach disk [datastore2] aa181094-cb78-44e0-8d7a-1d677e0af1fe/d5497cf1-16e8-478a-a834-e9289b1751b7-rescue.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 846.648320] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a336283a-4826-45c0-b263-b822620c6d99 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.654052] env[62627]: DEBUG nova.virt.vmwareapi.images [None req-e89666a8-aabd-4570-9fb7-9db72db6fb35 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Uploading image c4bcb9e5-6a75-4d8c-9f62-99a25e841d24 {{(pid=62627) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 846.682690] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cbd63fbf-a329-4c7f-9c3e-c0b94a07de5e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.699748] env[62627]: DEBUG oslo_vmware.api [None req-c6259ea2-dcab-4a0c-8998-3da60f6ee23c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for the task: (returnval){ [ 846.699748] env[62627]: value = "task-2194283" [ 846.699748] env[62627]: _type = "Task" [ 846.699748] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.701723] env[62627]: DEBUG oslo_vmware.rw_handles [None req-e89666a8-aabd-4570-9fb7-9db72db6fb35 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 846.701723] env[62627]: value = "vm-447638" [ 846.701723] env[62627]: _type = "VirtualMachine" [ 846.701723] env[62627]: }. {{(pid=62627) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 846.702058] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-115b4b71-b00b-4df9-b5b7-56824d1441f4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.718194] env[62627]: DEBUG oslo_vmware.rw_handles [None req-e89666a8-aabd-4570-9fb7-9db72db6fb35 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Lease: (returnval){ [ 846.718194] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52e34d0d-5c7d-a6e0-b951-f03745227bce" [ 846.718194] env[62627]: _type = "HttpNfcLease" [ 846.718194] env[62627]: } obtained for exporting VM: (result){ [ 846.718194] env[62627]: value = "vm-447638" [ 846.718194] env[62627]: _type = "VirtualMachine" [ 846.718194] env[62627]: }. 
{{(pid=62627) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 846.718194] env[62627]: DEBUG oslo_vmware.api [None req-e89666a8-aabd-4570-9fb7-9db72db6fb35 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Waiting for the lease: (returnval){ [ 846.718194] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52e34d0d-5c7d-a6e0-b951-f03745227bce" [ 846.718194] env[62627]: _type = "HttpNfcLease" [ 846.718194] env[62627]: } to be ready. {{(pid=62627) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 846.718194] env[62627]: DEBUG oslo_vmware.api [None req-c6259ea2-dcab-4a0c-8998-3da60f6ee23c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194283, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.725291] env[62627]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 846.725291] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52e34d0d-5c7d-a6e0-b951-f03745227bce" [ 846.725291] env[62627]: _type = "HttpNfcLease" [ 846.725291] env[62627]: } is initializing. {{(pid=62627) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 846.921448] env[62627]: DEBUG nova.network.neutron [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] [instance: 32a3b8bb-7c3a-4e30-8a50-37ce53e71882] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 847.050884] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 847.055843] env[62627]: DEBUG oslo_concurrency.lockutils [None req-03afe6a7-56ac-4412-a977-1ae075768e01 tempest-ServerRescueTestJSONUnderV235-139392927 tempest-ServerRescueTestJSONUnderV235-139392927-project-member] Lock "fffa23fe-3ce2-4879-b72d-11275d298504" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.232s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 847.090303] env[62627]: DEBUG oslo_vmware.api [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194280, 'name': PowerOnVM_Task, 'duration_secs': 0.747115} completed successfully. 
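The lease-related records above describe the export path used for the linked-clone image upload: obtain an HttpNfcLease for the VM, wait while it "is initializing", then once it "is ready" read the lease info and open the disk URL for reading. The sketch below imitates that flow with a stub lease object; the deviceUrl layout mirrors the vSphere HttpNfcLeaseInfo structure as I understand it, and none of this is the actual oslo_vmware.rw_handles code.

# Illustrative sketch of the HttpNfcLease export flow logged above. The lease
# is a stub; the real code lives in oslo_vmware.rw_handles / oslo_vmware.api.
import time


class StubLease:
    def __init__(self):
        self._states = iter(['initializing', 'ready'])
        self.state = next(self._states)

    def refresh(self):
        self.state = next(self._states, self.state)

    def info(self):
        # Mimics the deviceUrl entries of an HttpNfcLeaseInfo object.
        return {'deviceUrl': [{'disk': True,
                               'url': 'https://esx-host.example/nfc/.../disk-0.vmdk'}]}


def wait_for_lease_ready(lease, interval=0.1):
    while lease.state == 'initializing':
        time.sleep(interval)
        lease.refresh()
    if lease.state != 'ready':
        raise RuntimeError('lease entered state %s' % lease.state)


def find_vmdk_url(lease_info):
    for device_url in lease_info['deviceUrl']:
        if device_url.get('disk'):
            return device_url['url']
    raise LookupError('no disk deviceUrl in lease info')


lease = StubLease()
wait_for_lease_ready(lease)
print(find_vmdk_url(lease.info()))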
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.090583] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: aed62160-2db5-4008-a491-2b27ae81ef5f] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 847.090799] env[62627]: INFO nova.compute.manager [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: aed62160-2db5-4008-a491-2b27ae81ef5f] Took 8.86 seconds to spawn the instance on the hypervisor. [ 847.091106] env[62627]: DEBUG nova.compute.manager [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: aed62160-2db5-4008-a491-2b27ae81ef5f] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 847.092077] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4138965e-c9ec-455c-9839-334b30283f58 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.107829] env[62627]: DEBUG nova.network.neutron [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] [instance: 32a3b8bb-7c3a-4e30-8a50-37ce53e71882] Updating instance_info_cache with network_info: [{"id": "499d7862-fc13-4d2b-8c15-6eab541a13f1", "address": "fa:16:3e:a1:f0:e6", "network": {"id": "c6063e60-287a-43ae-92a1-4006c8814395", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1063732032-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "73aa22242f52456f90a63516c2259102", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap499d7862-fc", "ovs_interfaceid": "499d7862-fc13-4d2b-8c15-6eab541a13f1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 847.137935] env[62627]: DEBUG oslo_vmware.api [None req-f74ab9a9-2e7b-4cbe-9e9f-bdbc2edd0063 tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Task: {'id': task-2194282, 'name': ReconfigVM_Task, 'duration_secs': 0.185854} completed successfully. 
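The instance_info_cache update above stores Nova's network_info model: a list of VIF dicts, each with the port id, MAC, subnets, fixed IPs and backend details. A small, self-contained example of walking that structure to list the fixed IPs and MTU per port; the dict below is abbreviated from the log entry.

# Walk a network_info list (abbreviated from the log) and print per-port data.
network_info = [{
    'id': '499d7862-fc13-4d2b-8c15-6eab541a13f1',
    'address': 'fa:16:3e:a1:f0:e6',
    'network': {
        'subnets': [{
            'cidr': '192.168.128.0/28',
            'ips': [{'address': '192.168.128.7', 'type': 'fixed'}],
        }],
        'meta': {'mtu': 8950},
    },
    'devname': 'tap499d7862-fc',
}]

for vif in network_info:
    fixed_ips = [ip['address']
                 for subnet in vif['network']['subnets']
                 for ip in subnet['ips']
                 if ip['type'] == 'fixed']
    print(vif['devname'], vif['address'], fixed_ips,
          'mtu=%s' % vif['network']['meta'].get('mtu'))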
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.138347] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-f74ab9a9-2e7b-4cbe-9e9f-bdbc2edd0063 tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] Reconfigured VM instance instance-00000011 to detach disk 2000 {{(pid=62627) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 847.145387] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fd7c0dfc-46d1-4e08-9265-75133b548e1d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.162562] env[62627]: DEBUG oslo_vmware.api [None req-f74ab9a9-2e7b-4cbe-9e9f-bdbc2edd0063 tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Waiting for the task: (returnval){ [ 847.162562] env[62627]: value = "task-2194285" [ 847.162562] env[62627]: _type = "Task" [ 847.162562] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.182163] env[62627]: DEBUG oslo_vmware.api [None req-f74ab9a9-2e7b-4cbe-9e9f-bdbc2edd0063 tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Task: {'id': task-2194285, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.213982] env[62627]: DEBUG oslo_vmware.api [None req-c6259ea2-dcab-4a0c-8998-3da60f6ee23c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194283, 'name': ReconfigVM_Task, 'duration_secs': 0.344279} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.214459] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6259ea2-dcab-4a0c-8998-3da60f6ee23c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: aa181094-cb78-44e0-8d7a-1d677e0af1fe] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 847.214788] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b7b3a384-c8cd-474a-9073-c9791323cf27 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.223328] env[62627]: DEBUG oslo_vmware.api [None req-c6259ea2-dcab-4a0c-8998-3da60f6ee23c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for the task: (returnval){ [ 847.223328] env[62627]: value = "task-2194286" [ 847.223328] env[62627]: _type = "Task" [ 847.223328] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.227711] env[62627]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 847.227711] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52e34d0d-5c7d-a6e0-b951-f03745227bce" [ 847.227711] env[62627]: _type = "HttpNfcLease" [ 847.227711] env[62627]: } is ready. 
{{(pid=62627) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 847.228302] env[62627]: DEBUG oslo_vmware.rw_handles [None req-e89666a8-aabd-4570-9fb7-9db72db6fb35 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 847.228302] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52e34d0d-5c7d-a6e0-b951-f03745227bce" [ 847.228302] env[62627]: _type = "HttpNfcLease" [ 847.228302] env[62627]: }. {{(pid=62627) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 847.229039] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb938a57-c535-416e-b0da-d9552de562ba {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.236489] env[62627]: DEBUG oslo_vmware.api [None req-c6259ea2-dcab-4a0c-8998-3da60f6ee23c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194286, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.242284] env[62627]: DEBUG oslo_vmware.rw_handles [None req-e89666a8-aabd-4570-9fb7-9db72db6fb35 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52c48df6-f4fd-0261-dc27-112dbccba361/disk-0.vmdk from lease info. {{(pid=62627) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 847.242464] env[62627]: DEBUG oslo_vmware.rw_handles [None req-e89666a8-aabd-4570-9fb7-9db72db6fb35 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52c48df6-f4fd-0261-dc27-112dbccba361/disk-0.vmdk for reading. 
{{(pid=62627) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 847.365954] env[62627]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-ad2fb4ee-839e-4b1b-887b-f7f86a4342da {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.614205] env[62627]: DEBUG oslo_concurrency.lockutils [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Releasing lock "refresh_cache-32a3b8bb-7c3a-4e30-8a50-37ce53e71882" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 847.614205] env[62627]: DEBUG nova.compute.manager [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] [instance: 32a3b8bb-7c3a-4e30-8a50-37ce53e71882] Instance network_info: |[{"id": "499d7862-fc13-4d2b-8c15-6eab541a13f1", "address": "fa:16:3e:a1:f0:e6", "network": {"id": "c6063e60-287a-43ae-92a1-4006c8814395", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1063732032-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "73aa22242f52456f90a63516c2259102", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap499d7862-fc", "ovs_interfaceid": "499d7862-fc13-4d2b-8c15-6eab541a13f1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 847.615644] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] [instance: 32a3b8bb-7c3a-4e30-8a50-37ce53e71882] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a1:f0:e6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd5970ab5-34b8-4065-bfa6-f568b8f103b7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '499d7862-fc13-4d2b-8c15-6eab541a13f1', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 847.623015] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Creating folder: Project (73aa22242f52456f90a63516c2259102). Parent ref: group-v447541. 
{{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 847.626098] env[62627]: INFO nova.compute.manager [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: aed62160-2db5-4008-a491-2b27ae81ef5f] Took 44.89 seconds to build instance. [ 847.627073] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9db453c6-f723-4ea8-b3d7-dec483fbeb63 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.640057] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Created folder: Project (73aa22242f52456f90a63516c2259102) in parent group-v447541. [ 847.640220] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Creating folder: Instances. Parent ref: group-v447639. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 847.640458] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-06642edf-fbf0-4ada-aa4d-3936affb9904 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.653873] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Created folder: Instances in parent group-v447639. [ 847.654092] env[62627]: DEBUG oslo.service.loopingcall [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 847.654235] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 32a3b8bb-7c3a-4e30-8a50-37ce53e71882] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 847.654445] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c0332403-ccbf-4a34-80de-faeb804a477e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.686219] env[62627]: DEBUG oslo_vmware.api [None req-f74ab9a9-2e7b-4cbe-9e9f-bdbc2edd0063 tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Task: {'id': task-2194285, 'name': ReconfigVM_Task, 'duration_secs': 0.263391} completed successfully. 
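The two Folder.CreateFolder calls above build the per-project hierarchy "Project (<tenant_id>)/Instances" before the CreateVM_Task runs. Below is an in-memory imitation of that create-if-missing behaviour; the real vm_util code talks to vCenter and, as far as I know, treats a duplicate-name fault as "folder already exists", which this stub only approximates with fake morefs.

# In-memory imitation of idempotent folder creation under a parent moref.
import itertools

_moref_ids = itertools.count(447639)
folders = {}  # parent ref -> {child name: child ref}


def create_folder_if_missing(parent_ref, name):
    children = folders.setdefault(parent_ref, {})
    if name not in children:
        children[name] = 'group-v%d' % next(_moref_ids)  # fake moref
    return children[name]


tenant_id = '73aa22242f52456f90a63516c2259102'
project_ref = create_folder_if_missing('group-v447541', 'Project (%s)' % tenant_id)
instances_ref = create_folder_if_missing(project_ref, 'Instances')
print(project_ref, instances_ref)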
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.686355] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-f74ab9a9-2e7b-4cbe-9e9f-bdbc2edd0063 tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447551', 'volume_id': '51233852-f0e4-4bab-903c-b0575335358d', 'name': 'volume-51233852-f0e4-4bab-903c-b0575335358d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e5712962-ad8e-4cf2-aa47-275293609a13', 'attached_at': '', 'detached_at': '', 'volume_id': '51233852-f0e4-4bab-903c-b0575335358d', 'serial': '51233852-f0e4-4bab-903c-b0575335358d'} {{(pid=62627) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 847.686683] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-f74ab9a9-2e7b-4cbe-9e9f-bdbc2edd0063 tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 847.686983] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 847.686983] env[62627]: value = "task-2194289" [ 847.686983] env[62627]: _type = "Task" [ 847.686983] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.687739] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52cfaa0f-43e1-459f-bda4-1218b510674b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.700327] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-f74ab9a9-2e7b-4cbe-9e9f-bdbc2edd0063 tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 847.703737] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-71ea4a7d-20ae-4cd4-b21d-c87f213e4058 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.705461] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194289, 'name': CreateVM_Task} progress is 6%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.735365] env[62627]: DEBUG oslo_vmware.api [None req-c6259ea2-dcab-4a0c-8998-3da60f6ee23c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194286, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.740621] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fa6e09a-8cfa-4c98-9cdf-e5469c2d6cdc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.749813] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc9598fe-c82e-40d2-b308-b5f85c63c79d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.795133] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0ddc403-6684-463c-98e7-ef2c21ec9fd1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.798362] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-f74ab9a9-2e7b-4cbe-9e9f-bdbc2edd0063 tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 847.798620] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-f74ab9a9-2e7b-4cbe-9e9f-bdbc2edd0063 tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 847.798819] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-f74ab9a9-2e7b-4cbe-9e9f-bdbc2edd0063 tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Deleting the datastore file [datastore2] e5712962-ad8e-4cf2-aa47-275293609a13 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 847.799355] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2d1bf424-8b77-4af2-a0c9-3b9877ecf05e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.813139] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f59522d-52a2-4d9c-9dc9-dbd28e56a588 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.818634] env[62627]: DEBUG oslo_vmware.api [None req-f74ab9a9-2e7b-4cbe-9e9f-bdbc2edd0063 tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Waiting for the task: (returnval){ [ 847.818634] env[62627]: value = "task-2194291" [ 847.818634] env[62627]: _type = "Task" [ 847.818634] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.834594] env[62627]: DEBUG nova.compute.provider_tree [None req-a8b199b3-718d-4962-b925-ffcb4ce76c7b tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 847.837568] env[62627]: DEBUG nova.compute.manager [req-e38d3c12-7b46-4d2b-a52c-8a29f2c35a34 req-5a7415f9-402c-4853-8aa3-f5c2041f02bd service nova] [instance: 32a3b8bb-7c3a-4e30-8a50-37ce53e71882] Received event network-changed-499d7862-fc13-4d2b-8c15-6eab541a13f1 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 847.837955] env[62627]: DEBUG nova.compute.manager [req-e38d3c12-7b46-4d2b-a52c-8a29f2c35a34 req-5a7415f9-402c-4853-8aa3-f5c2041f02bd service nova] [instance: 32a3b8bb-7c3a-4e30-8a50-37ce53e71882] Refreshing instance network info cache due to event network-changed-499d7862-fc13-4d2b-8c15-6eab541a13f1. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 847.838577] env[62627]: DEBUG oslo_concurrency.lockutils [req-e38d3c12-7b46-4d2b-a52c-8a29f2c35a34 req-5a7415f9-402c-4853-8aa3-f5c2041f02bd service nova] Acquiring lock "refresh_cache-32a3b8bb-7c3a-4e30-8a50-37ce53e71882" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 847.838577] env[62627]: DEBUG oslo_concurrency.lockutils [req-e38d3c12-7b46-4d2b-a52c-8a29f2c35a34 req-5a7415f9-402c-4853-8aa3-f5c2041f02bd service nova] Acquired lock "refresh_cache-32a3b8bb-7c3a-4e30-8a50-37ce53e71882" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 847.838577] env[62627]: DEBUG nova.network.neutron [req-e38d3c12-7b46-4d2b-a52c-8a29f2c35a34 req-5a7415f9-402c-4853-8aa3-f5c2041f02bd service nova] [instance: 32a3b8bb-7c3a-4e30-8a50-37ce53e71882] Refreshing network info cache for port 499d7862-fc13-4d2b-8c15-6eab541a13f1 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 847.843885] env[62627]: DEBUG oslo_vmware.api [None req-f74ab9a9-2e7b-4cbe-9e9f-bdbc2edd0063 tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Task: {'id': task-2194291, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.129730] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5ee060a4-18b1-430b-b006-fc65598f3752 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Lock "aed62160-2db5-4008-a491-2b27ae81ef5f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 87.162s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 848.203172] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194289, 'name': CreateVM_Task, 'duration_secs': 0.365033} completed successfully. 
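The lockutils records above report, for each named lock, how long the caller waited to acquire it and how long it was held (e.g. the 87.162s hold on the instance build lock). A minimal, self-contained imitation of that bookkeeping with a plain threading.Lock; the real decorator in oslo_concurrency.lockutils does considerably more (fair locks, external file locks, semaphores).

# Context manager that reports waited/held times in the style of the log lines.
import contextlib
import threading
import time

_locks = {}


@contextlib.contextmanager
def timed_lock(name):
    lock = _locks.setdefault(name, threading.Lock())
    start = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - start
    print('Lock "%s" acquired :: waited %.3fs' % (name, waited))
    try:
        yield
    finally:
        held = time.monotonic() - start - waited
        lock.release()
        print('Lock "%s" released :: held %.3fs' % (name, held))


with timed_lock('compute_resources'):
    time.sleep(0.01)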
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.203534] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 32a3b8bb-7c3a-4e30-8a50-37ce53e71882] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 848.204505] env[62627]: DEBUG oslo_concurrency.lockutils [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 848.204882] env[62627]: DEBUG oslo_concurrency.lockutils [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 848.205256] env[62627]: DEBUG oslo_concurrency.lockutils [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 848.205600] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8507106d-45fb-49ef-8439-c6e58a6cf281 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.211828] env[62627]: DEBUG oslo_vmware.api [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Waiting for the task: (returnval){ [ 848.211828] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52076bba-33ef-01c4-bea1-e792538ded00" [ 848.211828] env[62627]: _type = "Task" [ 848.211828] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.222780] env[62627]: DEBUG oslo_vmware.api [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52076bba-33ef-01c4-bea1-e792538ded00, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.236365] env[62627]: DEBUG oslo_vmware.api [None req-c6259ea2-dcab-4a0c-8998-3da60f6ee23c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194286, 'name': PowerOnVM_Task, 'duration_secs': 0.828337} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.237026] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6259ea2-dcab-4a0c-8998-3da60f6ee23c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: aa181094-cb78-44e0-8d7a-1d677e0af1fe] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 848.241690] env[62627]: DEBUG nova.compute.manager [None req-c6259ea2-dcab-4a0c-8998-3da60f6ee23c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: aa181094-cb78-44e0-8d7a-1d677e0af1fe] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 848.244489] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5204881a-35e5-4fa0-9527-5db407994473 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.330700] env[62627]: DEBUG oslo_vmware.api [None req-f74ab9a9-2e7b-4cbe-9e9f-bdbc2edd0063 tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Task: {'id': task-2194291, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.107642} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.331051] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-f74ab9a9-2e7b-4cbe-9e9f-bdbc2edd0063 tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 848.331301] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-f74ab9a9-2e7b-4cbe-9e9f-bdbc2edd0063 tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 848.332492] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-f74ab9a9-2e7b-4cbe-9e9f-bdbc2edd0063 tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 848.332492] env[62627]: INFO nova.compute.manager [None req-f74ab9a9-2e7b-4cbe-9e9f-bdbc2edd0063 tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] Took 2.33 seconds to destroy the instance on the hypervisor. [ 848.332492] env[62627]: DEBUG oslo.service.loopingcall [None req-f74ab9a9-2e7b-4cbe-9e9f-bdbc2edd0063 tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 848.332492] env[62627]: DEBUG nova.compute.manager [-] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 848.332492] env[62627]: DEBUG nova.network.neutron [-] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 848.344860] env[62627]: DEBUG nova.scheduler.client.report [None req-a8b199b3-718d-4962-b925-ffcb4ce76c7b tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 848.633732] env[62627]: DEBUG nova.compute.manager [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 848.723670] env[62627]: DEBUG oslo_vmware.api [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52076bba-33ef-01c4-bea1-e792538ded00, 'name': SearchDatastore_Task, 'duration_secs': 0.015484} completed successfully. 
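The report-client record above logs the compute node's Placement inventory per resource class (total, reserved, allocation_ratio, and so on). Placement derives usable capacity roughly as (total - reserved) * allocation_ratio; that formula is my summary rather than something stated in the log, and the sketch below simply applies it to the exact inventory dict shown.

# Apply the capacity formula to the inventory data logged above.
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
    print('%s: capacity %d' % (rc, capacity))
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400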
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.725033] env[62627]: DEBUG oslo_concurrency.lockutils [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 848.725407] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] [instance: 32a3b8bb-7c3a-4e30-8a50-37ce53e71882] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 848.725659] env[62627]: DEBUG oslo_concurrency.lockutils [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 848.725877] env[62627]: DEBUG oslo_concurrency.lockutils [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 848.726210] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 848.726572] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-93189a96-edf5-4bc0-8d3c-35a8ef78e60c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.736449] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 848.736639] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 848.737978] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-74e66159-5031-4344-be9e-74821d34008b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.744355] env[62627]: DEBUG oslo_vmware.api [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Waiting for the task: (returnval){ [ 848.744355] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]525adbd1-bf58-8ce9-8ca2-5bcb13e35556" [ 848.744355] env[62627]: _type = "Task" [ 848.744355] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.752883] env[62627]: DEBUG nova.network.neutron [req-e38d3c12-7b46-4d2b-a52c-8a29f2c35a34 req-5a7415f9-402c-4853-8aa3-f5c2041f02bd service nova] [instance: 32a3b8bb-7c3a-4e30-8a50-37ce53e71882] Updated VIF entry in instance network info cache for port 499d7862-fc13-4d2b-8c15-6eab541a13f1. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 848.753687] env[62627]: DEBUG nova.network.neutron [req-e38d3c12-7b46-4d2b-a52c-8a29f2c35a34 req-5a7415f9-402c-4853-8aa3-f5c2041f02bd service nova] [instance: 32a3b8bb-7c3a-4e30-8a50-37ce53e71882] Updating instance_info_cache with network_info: [{"id": "499d7862-fc13-4d2b-8c15-6eab541a13f1", "address": "fa:16:3e:a1:f0:e6", "network": {"id": "c6063e60-287a-43ae-92a1-4006c8814395", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1063732032-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "73aa22242f52456f90a63516c2259102", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap499d7862-fc", "ovs_interfaceid": "499d7862-fc13-4d2b-8c15-6eab541a13f1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 848.762497] env[62627]: DEBUG oslo_vmware.api [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]525adbd1-bf58-8ce9-8ca2-5bcb13e35556, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.853682] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a8b199b3-718d-4962-b925-ffcb4ce76c7b tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.346s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 848.858654] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c2466423-9be2-4379-af8b-6ed3567af23e tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.880s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 848.859012] env[62627]: DEBUG nova.objects.instance [None req-c2466423-9be2-4379-af8b-6ed3567af23e tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Lazy-loading 'resources' on Instance uuid 16c4065b-83cf-4b1c-92bf-66ce0a75e573 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 848.899094] env[62627]: INFO nova.scheduler.client.report [None req-a8b199b3-718d-4962-b925-ffcb4ce76c7b tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Deleted allocations for instance 83998771-97a7-4458-834e-df1e4b2914ee [ 849.041461] env[62627]: DEBUG oslo_concurrency.lockutils [None req-633629a7-9318-4a0d-85ef-8b58214e0ddc tempest-VolumesAssistedSnapshotsTest-1158855689 tempest-VolumesAssistedSnapshotsTest-1158855689-project-admin] Acquiring lock "db8ddb7b-b119-415c-98d9-dc5b3d2868b7" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 849.042879] env[62627]: DEBUG oslo_concurrency.lockutils [None req-633629a7-9318-4a0d-85ef-8b58214e0ddc tempest-VolumesAssistedSnapshotsTest-1158855689 tempest-VolumesAssistedSnapshotsTest-1158855689-project-admin] Lock "db8ddb7b-b119-415c-98d9-dc5b3d2868b7" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 849.168122] env[62627]: DEBUG oslo_concurrency.lockutils [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 849.255015] env[62627]: DEBUG oslo_vmware.api [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]525adbd1-bf58-8ce9-8ca2-5bcb13e35556, 'name': SearchDatastore_Task, 'duration_secs': 0.017688} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.256398] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-376355a5-610d-4e39-9db3-789e6c5ca4d7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.261687] env[62627]: DEBUG oslo_vmware.api [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Waiting for the task: (returnval){ [ 849.261687] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52763ec8-4c97-3e32-d702-f13c24bc0eae" [ 849.261687] env[62627]: _type = "Task" [ 849.261687] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.265361] env[62627]: DEBUG oslo_concurrency.lockutils [req-e38d3c12-7b46-4d2b-a52c-8a29f2c35a34 req-5a7415f9-402c-4853-8aa3-f5c2041f02bd service nova] Releasing lock "refresh_cache-32a3b8bb-7c3a-4e30-8a50-37ce53e71882" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 849.270456] env[62627]: DEBUG oslo_vmware.api [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52763ec8-4c97-3e32-d702-f13c24bc0eae, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.407616] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a8b199b3-718d-4962-b925-ffcb4ce76c7b tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Lock "83998771-97a7-4458-834e-df1e4b2914ee" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.220s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 849.548595] env[62627]: DEBUG nova.compute.utils [None req-633629a7-9318-4a0d-85ef-8b58214e0ddc tempest-VolumesAssistedSnapshotsTest-1158855689 tempest-VolumesAssistedSnapshotsTest-1158855689-project-admin] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 849.676253] env[62627]: DEBUG nova.network.neutron [-] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 849.777838] env[62627]: DEBUG oslo_vmware.api [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52763ec8-4c97-3e32-d702-f13c24bc0eae, 'name': SearchDatastore_Task, 'duration_secs': 0.011501} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.778073] env[62627]: DEBUG oslo_concurrency.lockutils [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 849.778379] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 32a3b8bb-7c3a-4e30-8a50-37ce53e71882/32a3b8bb-7c3a-4e30-8a50-37ce53e71882.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 849.778685] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-609e0ec2-89bc-4e7c-b444-16c58854dc3a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.786975] env[62627]: DEBUG oslo_vmware.api [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Waiting for the task: (returnval){ [ 849.786975] env[62627]: value = "task-2194292" [ 849.786975] env[62627]: _type = "Task" [ 849.786975] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.798815] env[62627]: DEBUG oslo_vmware.api [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Task: {'id': task-2194292, 'name': CopyVirtualDisk_Task} progress is 0%. 
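The CopyVirtualDisk step above clones the cached base image from the devstack-image-cache_base folder into the instance's own datastore folder before first boot. A short sketch of how those two datastore paths relate; the path layout is read directly off the log lines and the helper names are made up.

# Build the source (image cache) and destination (instance) datastore paths.
def cached_image_path(datastore, image_id):
    return '[%s] devstack-image-cache_base/%s/%s.vmdk' % (datastore, image_id, image_id)


def instance_disk_path(datastore, instance_uuid):
    return '[%s] %s/%s.vmdk' % (datastore, instance_uuid, instance_uuid)


image_id = 'd5497cf1-16e8-478a-a834-e9289b1751b7'
instance_uuid = '32a3b8bb-7c3a-4e30-8a50-37ce53e71882'
print(cached_image_path('datastore2', image_id))
print(instance_disk_path('datastore2', instance_uuid))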
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.877600] env[62627]: DEBUG nova.compute.manager [None req-c4c2bcd7-2e48-41e6-9eb5-a66eadcde0c4 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 849.878838] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-220add4f-6ec0-4a16-a03a-0708f5696d14 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.971676] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb8cbfd2-7a1a-47e3-8f6b-b424f0d80731 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.980966] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45eaa106-c8f3-4e60-b515-b0c9caea4c5a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.013210] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f10ccb8-1007-42c9-8890-708ee02379c7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.020705] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-373642bd-54e9-4b67-8e42-9131e92cb0a1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.034179] env[62627]: DEBUG nova.compute.provider_tree [None req-c2466423-9be2-4379-af8b-6ed3567af23e tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 850.052607] env[62627]: DEBUG oslo_concurrency.lockutils [None req-633629a7-9318-4a0d-85ef-8b58214e0ddc tempest-VolumesAssistedSnapshotsTest-1158855689 tempest-VolumesAssistedSnapshotsTest-1158855689-project-admin] Lock "db8ddb7b-b119-415c-98d9-dc5b3d2868b7" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.010s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 850.180581] env[62627]: INFO nova.compute.manager [-] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] Took 1.85 seconds to deallocate network for instance. [ 850.301168] env[62627]: DEBUG oslo_vmware.api [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Task: {'id': task-2194292, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.306041] env[62627]: DEBUG nova.compute.manager [req-f72d8746-92ce-41eb-b939-7e5fcc68f447 req-ef0df0d0-1d48-4958-af27-5f484b9ce08d service nova] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] Received event network-vif-deleted-87592606-744f-4632-a44b-69f4b8ef9acf {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 850.399893] env[62627]: INFO nova.compute.manager [None req-c4c2bcd7-2e48-41e6-9eb5-a66eadcde0c4 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] instance snapshotting [ 850.402493] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f31a1ba-affd-4aca-9971-9f6d0ceb254b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.426839] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eb47f30-2176-44c4-9c34-73af4fbf7fb7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.537723] env[62627]: DEBUG nova.scheduler.client.report [None req-c2466423-9be2-4379-af8b-6ed3567af23e tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 850.752145] env[62627]: INFO nova.compute.manager [None req-f74ab9a9-2e7b-4cbe-9e9f-bdbc2edd0063 tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] Took 0.57 seconds to detach 1 volumes for instance. [ 850.754521] env[62627]: DEBUG nova.compute.manager [None req-f74ab9a9-2e7b-4cbe-9e9f-bdbc2edd0063 tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] Deleting volume: 51233852-f0e4-4bab-903c-b0575335358d {{(pid=62627) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 850.806535] env[62627]: DEBUG oslo_vmware.api [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Task: {'id': task-2194292, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.676025} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.806818] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 32a3b8bb-7c3a-4e30-8a50-37ce53e71882/32a3b8bb-7c3a-4e30-8a50-37ce53e71882.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 850.807134] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] [instance: 32a3b8bb-7c3a-4e30-8a50-37ce53e71882] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 850.807454] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-636620b3-a093-4a21-8bee-21600dd78768 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.818512] env[62627]: DEBUG oslo_vmware.api [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Waiting for the task: (returnval){ [ 850.818512] env[62627]: value = "task-2194293" [ 850.818512] env[62627]: _type = "Task" [ 850.818512] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.831539] env[62627]: DEBUG oslo_vmware.api [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Task: {'id': task-2194293, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.940728] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c4c2bcd7-2e48-41e6-9eb5-a66eadcde0c4 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Creating Snapshot of the VM instance {{(pid=62627) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 850.940947] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-ec3d6883-8527-4d76-81f3-c53090127fa6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.952842] env[62627]: DEBUG oslo_vmware.api [None req-c4c2bcd7-2e48-41e6-9eb5-a66eadcde0c4 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Waiting for the task: (returnval){ [ 850.952842] env[62627]: value = "task-2194295" [ 850.952842] env[62627]: _type = "Task" [ 850.952842] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.963712] env[62627]: DEBUG oslo_vmware.api [None req-c4c2bcd7-2e48-41e6-9eb5-a66eadcde0c4 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194295, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.042636] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c2466423-9be2-4379-af8b-6ed3567af23e tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.184s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 851.045092] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.414s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 851.046550] env[62627]: INFO nova.compute.claims [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 851.067176] env[62627]: INFO nova.scheduler.client.report [None req-c2466423-9be2-4379-af8b-6ed3567af23e tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Deleted allocations for instance 16c4065b-83cf-4b1c-92bf-66ce0a75e573 [ 851.130385] env[62627]: DEBUG oslo_concurrency.lockutils [None req-633629a7-9318-4a0d-85ef-8b58214e0ddc tempest-VolumesAssistedSnapshotsTest-1158855689 tempest-VolumesAssistedSnapshotsTest-1158855689-project-admin] Acquiring lock "db8ddb7b-b119-415c-98d9-dc5b3d2868b7" by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 851.131404] env[62627]: DEBUG oslo_concurrency.lockutils [None req-633629a7-9318-4a0d-85ef-8b58214e0ddc tempest-VolumesAssistedSnapshotsTest-1158855689 tempest-VolumesAssistedSnapshotsTest-1158855689-project-admin] Lock "db8ddb7b-b119-415c-98d9-dc5b3d2868b7" acquired by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 851.132661] env[62627]: INFO nova.compute.manager [None req-633629a7-9318-4a0d-85ef-8b58214e0ddc tempest-VolumesAssistedSnapshotsTest-1158855689 tempest-VolumesAssistedSnapshotsTest-1158855689-project-admin] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Attaching volume 8911edc0-40ae-43db-99b6-9eec97446da5 to /dev/sdb [ 851.172027] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b6ad49f-06f3-4aa1-a9a3-2882ae15431b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.179707] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c82b67dc-627b-46d8-a739-831f5e504429 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.196095] env[62627]: DEBUG nova.virt.block_device [None req-633629a7-9318-4a0d-85ef-8b58214e0ddc tempest-VolumesAssistedSnapshotsTest-1158855689 tempest-VolumesAssistedSnapshotsTest-1158855689-project-admin] [instance: 
db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Updating existing volume attachment record: 8855c905-d746-4aed-920d-19fb98ea810a {{(pid=62627) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 851.302055] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f74ab9a9-2e7b-4cbe-9e9f-bdbc2edd0063 tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 851.328453] env[62627]: DEBUG oslo_vmware.api [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Task: {'id': task-2194293, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.125064} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.328727] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] [instance: 32a3b8bb-7c3a-4e30-8a50-37ce53e71882] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 851.329602] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64cbf8e2-8f45-4a84-a326-79cc3931f82e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.356193] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] [instance: 32a3b8bb-7c3a-4e30-8a50-37ce53e71882] Reconfiguring VM instance instance-00000024 to attach disk [datastore2] 32a3b8bb-7c3a-4e30-8a50-37ce53e71882/32a3b8bb-7c3a-4e30-8a50-37ce53e71882.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 851.356477] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f1f17d19-89b5-482b-8ea5-80737da54759 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.376825] env[62627]: DEBUG oslo_vmware.api [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Waiting for the task: (returnval){ [ 851.376825] env[62627]: value = "task-2194298" [ 851.376825] env[62627]: _type = "Task" [ 851.376825] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.389586] env[62627]: DEBUG oslo_vmware.api [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Task: {'id': task-2194298, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.466443] env[62627]: DEBUG oslo_vmware.api [None req-c4c2bcd7-2e48-41e6-9eb5-a66eadcde0c4 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194295, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.499484] env[62627]: INFO nova.compute.manager [None req-a7546816-7c5c-4883-b8d1-58625e952e60 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Rescuing [ 851.499805] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a7546816-7c5c-4883-b8d1-58625e952e60 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Acquiring lock "refresh_cache-241311b0-ba81-41d1-a9b6-cd40d04dbe3c" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 851.499960] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a7546816-7c5c-4883-b8d1-58625e952e60 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Acquired lock "refresh_cache-241311b0-ba81-41d1-a9b6-cd40d04dbe3c" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 851.500144] env[62627]: DEBUG nova.network.neutron [None req-a7546816-7c5c-4883-b8d1-58625e952e60 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 851.576038] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c2466423-9be2-4379-af8b-6ed3567af23e tempest-ServerShowV254Test-1641358656 tempest-ServerShowV254Test-1641358656-project-member] Lock "16c4065b-83cf-4b1c-92bf-66ce0a75e573" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 39.933s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 851.889418] env[62627]: DEBUG oslo_vmware.api [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Task: {'id': task-2194298, 'name': ReconfigVM_Task, 'duration_secs': 0.316447} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.889814] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] [instance: 32a3b8bb-7c3a-4e30-8a50-37ce53e71882] Reconfigured VM instance instance-00000024 to attach disk [datastore2] 32a3b8bb-7c3a-4e30-8a50-37ce53e71882/32a3b8bb-7c3a-4e30-8a50-37ce53e71882.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 851.890694] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-20eeefaa-acbd-4f63-84b4-8d50d293c542 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.899846] env[62627]: DEBUG oslo_vmware.api [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Waiting for the task: (returnval){ [ 851.899846] env[62627]: value = "task-2194300" [ 851.899846] env[62627]: _type = "Task" [ 851.899846] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.909601] env[62627]: DEBUG oslo_vmware.api [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Task: {'id': task-2194300, 'name': Rename_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.969908] env[62627]: DEBUG oslo_vmware.api [None req-c4c2bcd7-2e48-41e6-9eb5-a66eadcde0c4 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194295, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.323451] env[62627]: DEBUG nova.network.neutron [None req-a7546816-7c5c-4883-b8d1-58625e952e60 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Updating instance_info_cache with network_info: [{"id": "ddd3e21b-1460-4da2-bfcc-258303d2719e", "address": "fa:16:3e:e3:64:4d", "network": {"id": "0b8ce5a6-9abc-4473-b558-a4e9e5c7d741", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1874234589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d41b3fa69e2473fbea79b0ec52108fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc9714ff-7109-4ea1-9435-b2b3fbdb9e81", "external-id": "nsx-vlan-transportzone-887", "segmentation_id": 887, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapddd3e21b-14", "ovs_interfaceid": "ddd3e21b-1460-4da2-bfcc-258303d2719e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 852.414709] env[62627]: DEBUG oslo_vmware.api [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Task: {'id': task-2194300, 'name': Rename_Task, 'duration_secs': 0.170356} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.417525] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] [instance: 32a3b8bb-7c3a-4e30-8a50-37ce53e71882] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 852.418780] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-57e8d0a2-8ce3-4e3a-b4df-32f413d670f6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.427123] env[62627]: DEBUG oslo_vmware.api [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Waiting for the task: (returnval){ [ 852.427123] env[62627]: value = "task-2194301" [ 852.427123] env[62627]: _type = "Task" [ 852.427123] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.440679] env[62627]: DEBUG oslo_vmware.api [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Task: {'id': task-2194301, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.466785] env[62627]: DEBUG oslo_vmware.api [None req-c4c2bcd7-2e48-41e6-9eb5-a66eadcde0c4 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194295, 'name': CreateSnapshot_Task, 'duration_secs': 1.231473} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.467115] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c4c2bcd7-2e48-41e6-9eb5-a66eadcde0c4 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Created Snapshot of the VM instance {{(pid=62627) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 852.467969] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8311048b-b8c1-4ace-bb35-f7ec880e0908 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.640018] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec46b9ec-967b-4362-91ec-4dc75712dbc1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.649748] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cf1ac97-1d30-4855-93bd-8dcb4d182e65 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.684262] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0731fcfb-d996-49cd-be5a-695e66ad33e3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.693080] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-386febf5-f1ae-492a-8303-26421136f687 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.709245] env[62627]: DEBUG nova.compute.provider_tree [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 852.826425] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a7546816-7c5c-4883-b8d1-58625e952e60 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Releasing lock "refresh_cache-241311b0-ba81-41d1-a9b6-cd40d04dbe3c" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 852.939774] env[62627]: DEBUG oslo_vmware.api [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Task: {'id': task-2194301, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.988972] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c4c2bcd7-2e48-41e6-9eb5-a66eadcde0c4 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Creating linked-clone VM from snapshot {{(pid=62627) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 852.989440] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-34dfdf08-47da-4528-a9e7-12b55bff60b7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.999350] env[62627]: DEBUG oslo_vmware.api [None req-c4c2bcd7-2e48-41e6-9eb5-a66eadcde0c4 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Waiting for the task: (returnval){ [ 852.999350] env[62627]: value = "task-2194302" [ 852.999350] env[62627]: _type = "Task" [ 852.999350] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.009487] env[62627]: DEBUG oslo_vmware.api [None req-c4c2bcd7-2e48-41e6-9eb5-a66eadcde0c4 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194302, 'name': CloneVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.216419] env[62627]: DEBUG nova.scheduler.client.report [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 853.438846] env[62627]: DEBUG oslo_vmware.api [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Task: {'id': task-2194301, 'name': PowerOnVM_Task, 'duration_secs': 0.515471} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.439286] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] [instance: 32a3b8bb-7c3a-4e30-8a50-37ce53e71882] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 853.439404] env[62627]: INFO nova.compute.manager [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] [instance: 32a3b8bb-7c3a-4e30-8a50-37ce53e71882] Took 8.15 seconds to spawn the instance on the hypervisor. 
[ 853.439588] env[62627]: DEBUG nova.compute.manager [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] [instance: 32a3b8bb-7c3a-4e30-8a50-37ce53e71882] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 853.440378] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44d9735a-4b8a-494f-8ffa-a88474590095 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.510310] env[62627]: DEBUG oslo_vmware.api [None req-c4c2bcd7-2e48-41e6-9eb5-a66eadcde0c4 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194302, 'name': CloneVM_Task} progress is 94%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.723162] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.678s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 853.724081] env[62627]: DEBUG nova.compute.manager [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 853.727209] env[62627]: DEBUG oslo_concurrency.lockutils [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.361s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 853.729153] env[62627]: INFO nova.compute.claims [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] [instance: 5f0240da-bec1-4bf3-a975-6ed3d0630a2d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 853.959552] env[62627]: INFO nova.compute.manager [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] [instance: 32a3b8bb-7c3a-4e30-8a50-37ce53e71882] Took 43.77 seconds to build instance. [ 854.010958] env[62627]: DEBUG oslo_vmware.api [None req-c4c2bcd7-2e48-41e6-9eb5-a66eadcde0c4 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194302, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.234700] env[62627]: DEBUG nova.compute.utils [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 854.242099] env[62627]: DEBUG nova.compute.manager [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] Allocating IP information in the background. {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 854.242425] env[62627]: DEBUG nova.network.neutron [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 854.296713] env[62627]: DEBUG nova.policy [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '27f56b55b5da498a94fe889d63538d53', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ca351451d2344fc1872f6faf89412354', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 854.362882] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7546816-7c5c-4883-b8d1-58625e952e60 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 854.363232] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e9d31a91-82b8-40f0-a53a-7f48334dc6bc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.372939] env[62627]: DEBUG oslo_vmware.api [None req-a7546816-7c5c-4883-b8d1-58625e952e60 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for the task: (returnval){ [ 854.372939] env[62627]: value = "task-2194304" [ 854.372939] env[62627]: _type = "Task" [ 854.372939] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.383967] env[62627]: DEBUG oslo_vmware.api [None req-a7546816-7c5c-4883-b8d1-58625e952e60 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194304, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.461790] env[62627]: DEBUG oslo_concurrency.lockutils [None req-24933a10-6d62-45c8-a305-97ad42ef4e4b tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Lock "32a3b8bb-7c3a-4e30-8a50-37ce53e71882" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 91.089s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 854.513542] env[62627]: DEBUG oslo_vmware.api [None req-c4c2bcd7-2e48-41e6-9eb5-a66eadcde0c4 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194302, 'name': CloneVM_Task} progress is 95%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.647613] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9ee09dc4-abf3-444a-b97a-c68df2d6168f tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Acquiring lock "32a3b8bb-7c3a-4e30-8a50-37ce53e71882" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 854.648166] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9ee09dc4-abf3-444a-b97a-c68df2d6168f tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Lock "32a3b8bb-7c3a-4e30-8a50-37ce53e71882" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 854.648480] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9ee09dc4-abf3-444a-b97a-c68df2d6168f tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Acquiring lock "32a3b8bb-7c3a-4e30-8a50-37ce53e71882-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 854.648680] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9ee09dc4-abf3-444a-b97a-c68df2d6168f tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Lock "32a3b8bb-7c3a-4e30-8a50-37ce53e71882-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 854.648853] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9ee09dc4-abf3-444a-b97a-c68df2d6168f tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Lock "32a3b8bb-7c3a-4e30-8a50-37ce53e71882-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 854.651322] env[62627]: INFO nova.compute.manager [None req-9ee09dc4-abf3-444a-b97a-c68df2d6168f tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] [instance: 32a3b8bb-7c3a-4e30-8a50-37ce53e71882] Terminating instance [ 854.740489] env[62627]: 
DEBUG nova.compute.manager [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 854.754255] env[62627]: DEBUG nova.network.neutron [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] Successfully created port: bf72690a-0358-469d-8d21-85ef459cbcb9 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 854.890713] env[62627]: DEBUG oslo_vmware.api [None req-a7546816-7c5c-4883-b8d1-58625e952e60 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194304, 'name': PowerOffVM_Task, 'duration_secs': 0.353498} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.892189] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7546816-7c5c-4883-b8d1-58625e952e60 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 854.892189] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8bd9b56-a42d-45f5-af61-96e154be6d83 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.919173] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4006c563-18ba-4bda-afb7-24cc9023a04f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.965021] env[62627]: DEBUG nova.compute.manager [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 854.977140] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7546816-7c5c-4883-b8d1-58625e952e60 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 854.977140] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8491c24e-0773-40f2-9023-7d1422302665 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.991942] env[62627]: DEBUG oslo_vmware.api [None req-a7546816-7c5c-4883-b8d1-58625e952e60 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for the task: (returnval){ [ 854.991942] env[62627]: value = "task-2194305" [ 854.991942] env[62627]: _type = "Task" [ 854.991942] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.007681] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7546816-7c5c-4883-b8d1-58625e952e60 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] VM already powered off {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 855.007924] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-a7546816-7c5c-4883-b8d1-58625e952e60 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 855.008194] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a7546816-7c5c-4883-b8d1-58625e952e60 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 855.008335] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a7546816-7c5c-4883-b8d1-58625e952e60 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 855.009288] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7546816-7c5c-4883-b8d1-58625e952e60 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 855.009288] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7f0c5039-d414-4f7b-8b11-9f087dc9402d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.023195] env[62627]: DEBUG oslo_vmware.api [None req-c4c2bcd7-2e48-41e6-9eb5-a66eadcde0c4 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194302, 'name': CloneVM_Task, 'duration_secs': 2.014308} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.024379] env[62627]: INFO nova.virt.vmwareapi.vmops [None req-c4c2bcd7-2e48-41e6-9eb5-a66eadcde0c4 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Created linked-clone VM from snapshot [ 855.024732] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7546816-7c5c-4883-b8d1-58625e952e60 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 855.024897] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-a7546816-7c5c-4883-b8d1-58625e952e60 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 855.026565] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fde9cfc2-2bcf-4751-9d78-3857e9029355 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.029805] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-68e4f44d-6c0b-4ac7-b1c8-9399f28c05c0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.042980] env[62627]: DEBUG nova.virt.vmwareapi.images [None req-c4c2bcd7-2e48-41e6-9eb5-a66eadcde0c4 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Uploading image 23f5f7b2-7c7c-4574-9060-cdeebd60d98b {{(pid=62627) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 855.047665] env[62627]: DEBUG oslo_vmware.api [None req-a7546816-7c5c-4883-b8d1-58625e952e60 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for the task: (returnval){ [ 855.047665] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52270497-d128-b793-cc62-7c13f388f4e4" [ 855.047665] env[62627]: _type = "Task" [ 855.047665] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.059512] env[62627]: DEBUG oslo_vmware.api [None req-a7546816-7c5c-4883-b8d1-58625e952e60 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52270497-d128-b793-cc62-7c13f388f4e4, 'name': SearchDatastore_Task, 'duration_secs': 0.011552} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.060311] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-520d0e49-1d46-49bd-b01f-acdccca766e1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.072751] env[62627]: DEBUG oslo_vmware.api [None req-a7546816-7c5c-4883-b8d1-58625e952e60 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for the task: (returnval){ [ 855.072751] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5231a04a-e239-c9af-228a-b6ceaf68d9ef" [ 855.072751] env[62627]: _type = "Task" [ 855.072751] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.074837] env[62627]: DEBUG oslo_vmware.rw_handles [None req-c4c2bcd7-2e48-41e6-9eb5-a66eadcde0c4 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 855.074837] env[62627]: value = "vm-447646" [ 855.074837] env[62627]: _type = "VirtualMachine" [ 855.074837] env[62627]: }. {{(pid=62627) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 855.075140] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-ba5c1609-2045-4001-b86f-c6391b054da1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.089089] env[62627]: DEBUG oslo_vmware.api [None req-a7546816-7c5c-4883-b8d1-58625e952e60 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5231a04a-e239-c9af-228a-b6ceaf68d9ef, 'name': SearchDatastore_Task, 'duration_secs': 0.013893} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.090786] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a7546816-7c5c-4883-b8d1-58625e952e60 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 855.090786] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7546816-7c5c-4883-b8d1-58625e952e60 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 241311b0-ba81-41d1-a9b6-cd40d04dbe3c/d5497cf1-16e8-478a-a834-e9289b1751b7-rescue.vmdk. 
{{(pid=62627) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 855.090965] env[62627]: DEBUG oslo_vmware.rw_handles [None req-c4c2bcd7-2e48-41e6-9eb5-a66eadcde0c4 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Lease: (returnval){ [ 855.090965] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]520af790-f5f7-d0f1-f02b-df4d87cbe267" [ 855.090965] env[62627]: _type = "HttpNfcLease" [ 855.090965] env[62627]: } obtained for exporting VM: (result){ [ 855.090965] env[62627]: value = "vm-447646" [ 855.090965] env[62627]: _type = "VirtualMachine" [ 855.090965] env[62627]: }. {{(pid=62627) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 855.091207] env[62627]: DEBUG oslo_vmware.api [None req-c4c2bcd7-2e48-41e6-9eb5-a66eadcde0c4 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Waiting for the lease: (returnval){ [ 855.091207] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]520af790-f5f7-d0f1-f02b-df4d87cbe267" [ 855.091207] env[62627]: _type = "HttpNfcLease" [ 855.091207] env[62627]: } to be ready. {{(pid=62627) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 855.092044] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8243d0f9-fc74-4b19-abbc-34f410734850 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.108019] env[62627]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 855.108019] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]520af790-f5f7-d0f1-f02b-df4d87cbe267" [ 855.108019] env[62627]: _type = "HttpNfcLease" [ 855.108019] env[62627]: } is ready. {{(pid=62627) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 855.108019] env[62627]: DEBUG oslo_vmware.rw_handles [None req-c4c2bcd7-2e48-41e6-9eb5-a66eadcde0c4 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 855.108019] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]520af790-f5f7-d0f1-f02b-df4d87cbe267" [ 855.108019] env[62627]: _type = "HttpNfcLease" [ 855.108019] env[62627]: }. {{(pid=62627) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 855.108019] env[62627]: DEBUG oslo_vmware.api [None req-a7546816-7c5c-4883-b8d1-58625e952e60 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for the task: (returnval){ [ 855.108019] env[62627]: value = "task-2194307" [ 855.108019] env[62627]: _type = "Task" [ 855.108019] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.108019] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd03d7f4-b23f-401e-ac6b-8803eda18dd6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.125084] env[62627]: DEBUG oslo_vmware.rw_handles [None req-c4c2bcd7-2e48-41e6-9eb5-a66eadcde0c4 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5226bf73-fad4-14be-e37e-63a3da716528/disk-0.vmdk from lease info. {{(pid=62627) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 855.125084] env[62627]: DEBUG oslo_vmware.rw_handles [None req-c4c2bcd7-2e48-41e6-9eb5-a66eadcde0c4 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5226bf73-fad4-14be-e37e-63a3da716528/disk-0.vmdk for reading. {{(pid=62627) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 855.181261] env[62627]: DEBUG nova.compute.manager [None req-9ee09dc4-abf3-444a-b97a-c68df2d6168f tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] [instance: 32a3b8bb-7c3a-4e30-8a50-37ce53e71882] Start destroying the instance on the hypervisor. {{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 855.181362] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-9ee09dc4-abf3-444a-b97a-c68df2d6168f tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] [instance: 32a3b8bb-7c3a-4e30-8a50-37ce53e71882] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 855.188092] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a9b7cb7-a26c-4368-8569-5c395bacbacf {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.197657] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ee09dc4-abf3-444a-b97a-c68df2d6168f tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] [instance: 32a3b8bb-7c3a-4e30-8a50-37ce53e71882] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 855.197657] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-884c97ac-e40a-479d-bf98-1b206b4e12f5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.205892] env[62627]: DEBUG oslo_vmware.api [None req-9ee09dc4-abf3-444a-b97a-c68df2d6168f tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Waiting for the task: (returnval){ [ 855.205892] env[62627]: value = "task-2194308" [ 855.205892] env[62627]: _type = "Task" [ 855.205892] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.215852] env[62627]: DEBUG oslo_vmware.api [None req-9ee09dc4-abf3-444a-b97a-c68df2d6168f tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Task: {'id': task-2194308, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.273949] env[62627]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-c342af47-afba-4fe1-80d7-92eb334c92ba {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.458040] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80859149-e9b7-483f-9102-0f450e5d6f4d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.468070] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d551c2fe-663e-49c4-a752-c598ea7f50f2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.505360] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25d1f32b-e907-4d50-bbf3-027962d22e52 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.515772] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c35b9506-05eb-4a26-a6ee-6ba89311333f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.522415] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 855.534513] env[62627]: DEBUG nova.compute.provider_tree [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 855.622886] env[62627]: DEBUG oslo_vmware.api [None req-a7546816-7c5c-4883-b8d1-58625e952e60 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194307, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.721047] env[62627]: DEBUG oslo_vmware.api [None req-9ee09dc4-abf3-444a-b97a-c68df2d6168f tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Task: {'id': task-2194308, 'name': PowerOffVM_Task, 'duration_secs': 0.218529} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.721047] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ee09dc4-abf3-444a-b97a-c68df2d6168f tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] [instance: 32a3b8bb-7c3a-4e30-8a50-37ce53e71882] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 855.721047] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-9ee09dc4-abf3-444a-b97a-c68df2d6168f tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] [instance: 32a3b8bb-7c3a-4e30-8a50-37ce53e71882] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 855.721047] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-00ee063b-580d-43ba-8fc6-5e52cd9996c4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.745285] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-633629a7-9318-4a0d-85ef-8b58214e0ddc tempest-VolumesAssistedSnapshotsTest-1158855689 tempest-VolumesAssistedSnapshotsTest-1158855689-project-admin] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Volume attach. Driver type: vmdk {{(pid=62627) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 855.745545] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-633629a7-9318-4a0d-85ef-8b58214e0ddc tempest-VolumesAssistedSnapshotsTest-1158855689 tempest-VolumesAssistedSnapshotsTest-1158855689-project-admin] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447644', 'volume_id': '8911edc0-40ae-43db-99b6-9eec97446da5', 'name': 'volume-8911edc0-40ae-43db-99b6-9eec97446da5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'db8ddb7b-b119-415c-98d9-dc5b3d2868b7', 'attached_at': '', 'detached_at': '', 'volume_id': '8911edc0-40ae-43db-99b6-9eec97446da5', 'serial': '8911edc0-40ae-43db-99b6-9eec97446da5'} {{(pid=62627) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 855.746527] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27bcd7aa-b7fe-4b3e-b946-f33814e01292 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.750099] env[62627]: DEBUG nova.compute.manager [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] Start spawning the instance on the hypervisor. 
{{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 855.772244] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd955818-f394-43e6-9548-abe70405055c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.801605] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-633629a7-9318-4a0d-85ef-8b58214e0ddc tempest-VolumesAssistedSnapshotsTest-1158855689 tempest-VolumesAssistedSnapshotsTest-1158855689-project-admin] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Reconfiguring VM instance instance-0000000a to attach disk [datastore1] volume-8911edc0-40ae-43db-99b6-9eec97446da5/volume-8911edc0-40ae-43db-99b6-9eec97446da5.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 855.804208] env[62627]: DEBUG nova.virt.hardware [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 855.804509] env[62627]: DEBUG nova.virt.hardware [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 855.804633] env[62627]: DEBUG nova.virt.hardware [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 855.804859] env[62627]: DEBUG nova.virt.hardware [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 855.804997] env[62627]: DEBUG nova.virt.hardware [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 855.805215] env[62627]: DEBUG nova.virt.hardware [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:434}} [ 855.805432] env[62627]: DEBUG nova.virt.hardware [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 855.805592] env[62627]: DEBUG nova.virt.hardware [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 855.805793] env[62627]: DEBUG nova.virt.hardware [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 855.805916] env[62627]: DEBUG nova.virt.hardware [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 855.806098] env[62627]: DEBUG nova.virt.hardware [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 855.807448] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5961cd17-f72c-4fa1-accd-c9225346f4c4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.822974] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9836ce48-4887-4c12-901a-c4f526042178 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.827281] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-9ee09dc4-abf3-444a-b97a-c68df2d6168f tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] [instance: 32a3b8bb-7c3a-4e30-8a50-37ce53e71882] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 855.827527] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-9ee09dc4-abf3-444a-b97a-c68df2d6168f tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] [instance: 32a3b8bb-7c3a-4e30-8a50-37ce53e71882] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 855.827716] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ee09dc4-abf3-444a-b97a-c68df2d6168f tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Deleting the datastore file [datastore2] 32a3b8bb-7c3a-4e30-8a50-37ce53e71882 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 855.828079] env[62627]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-61711589-cccc-44f4-a24a-79473fe4785b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.837157] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a899a1ec-c9fa-485b-b102-0509c8dee4d8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.843974] env[62627]: DEBUG oslo_vmware.api [None req-9ee09dc4-abf3-444a-b97a-c68df2d6168f tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Waiting for the task: (returnval){ [ 855.843974] env[62627]: value = "task-2194310" [ 855.843974] env[62627]: _type = "Task" [ 855.843974] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.844418] env[62627]: DEBUG oslo_vmware.api [None req-633629a7-9318-4a0d-85ef-8b58214e0ddc tempest-VolumesAssistedSnapshotsTest-1158855689 tempest-VolumesAssistedSnapshotsTest-1158855689-project-admin] Waiting for the task: (returnval){ [ 855.844418] env[62627]: value = "task-2194311" [ 855.844418] env[62627]: _type = "Task" [ 855.844418] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.869883] env[62627]: DEBUG oslo_vmware.api [None req-9ee09dc4-abf3-444a-b97a-c68df2d6168f tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Task: {'id': task-2194310, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.874184] env[62627]: DEBUG oslo_vmware.api [None req-633629a7-9318-4a0d-85ef-8b58214e0ddc tempest-VolumesAssistedSnapshotsTest-1158855689 tempest-VolumesAssistedSnapshotsTest-1158855689-project-admin] Task: {'id': task-2194311, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.040646] env[62627]: DEBUG nova.scheduler.client.report [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 856.122957] env[62627]: DEBUG oslo_vmware.api [None req-a7546816-7c5c-4883-b8d1-58625e952e60 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194307, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.669464} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.123175] env[62627]: INFO nova.virt.vmwareapi.ds_util [None req-a7546816-7c5c-4883-b8d1-58625e952e60 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 241311b0-ba81-41d1-a9b6-cd40d04dbe3c/d5497cf1-16e8-478a-a834-e9289b1751b7-rescue.vmdk. [ 856.124261] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec026e6c-056e-41d8-912f-79a19382a22f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.160676] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-a7546816-7c5c-4883-b8d1-58625e952e60 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Reconfiguring VM instance instance-00000020 to attach disk [datastore2] 241311b0-ba81-41d1-a9b6-cd40d04dbe3c/d5497cf1-16e8-478a-a834-e9289b1751b7-rescue.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 856.161481] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-56658275-9f1a-4ea2-b19b-3197dea18e37 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.182878] env[62627]: DEBUG oslo_vmware.api [None req-a7546816-7c5c-4883-b8d1-58625e952e60 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for the task: (returnval){ [ 856.182878] env[62627]: value = "task-2194312" [ 856.182878] env[62627]: _type = "Task" [ 856.182878] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.192902] env[62627]: DEBUG oslo_vmware.api [None req-a7546816-7c5c-4883-b8d1-58625e952e60 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194312, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.363135] env[62627]: DEBUG oslo_vmware.api [None req-9ee09dc4-abf3-444a-b97a-c68df2d6168f tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Task: {'id': task-2194310, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.306207} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.366912] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ee09dc4-abf3-444a-b97a-c68df2d6168f tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 856.367333] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-9ee09dc4-abf3-444a-b97a-c68df2d6168f tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] [instance: 32a3b8bb-7c3a-4e30-8a50-37ce53e71882] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 856.367673] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-9ee09dc4-abf3-444a-b97a-c68df2d6168f tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] [instance: 32a3b8bb-7c3a-4e30-8a50-37ce53e71882] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 856.368072] env[62627]: INFO nova.compute.manager [None req-9ee09dc4-abf3-444a-b97a-c68df2d6168f tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] [instance: 32a3b8bb-7c3a-4e30-8a50-37ce53e71882] Took 1.19 seconds to destroy the instance on the hypervisor. [ 856.368457] env[62627]: DEBUG oslo.service.loopingcall [None req-9ee09dc4-abf3-444a-b97a-c68df2d6168f tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 856.368798] env[62627]: DEBUG oslo_vmware.api [None req-633629a7-9318-4a0d-85ef-8b58214e0ddc tempest-VolumesAssistedSnapshotsTest-1158855689 tempest-VolumesAssistedSnapshotsTest-1158855689-project-admin] Task: {'id': task-2194311, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.369166] env[62627]: DEBUG nova.compute.manager [-] [instance: 32a3b8bb-7c3a-4e30-8a50-37ce53e71882] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 856.369380] env[62627]: DEBUG nova.network.neutron [-] [instance: 32a3b8bb-7c3a-4e30-8a50-37ce53e71882] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 856.551795] env[62627]: DEBUG oslo_concurrency.lockutils [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.824s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 856.554259] env[62627]: DEBUG nova.compute.manager [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] [instance: 5f0240da-bec1-4bf3-a975-6ed3d0630a2d] Start building networks asynchronously for instance.
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 856.557530] env[62627]: DEBUG oslo_concurrency.lockutils [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.283s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 856.559227] env[62627]: INFO nova.compute.claims [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 856.695216] env[62627]: DEBUG oslo_vmware.api [None req-a7546816-7c5c-4883-b8d1-58625e952e60 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194312, 'name': ReconfigVM_Task, 'duration_secs': 0.440221} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.695785] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-a7546816-7c5c-4883-b8d1-58625e952e60 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Reconfigured VM instance instance-00000020 to attach disk [datastore2] 241311b0-ba81-41d1-a9b6-cd40d04dbe3c/d5497cf1-16e8-478a-a834-e9289b1751b7-rescue.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 856.697183] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7965e7fd-9786-49e7-8b91-67173fd15425 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.727357] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e4076265-817d-434c-974f-50203a712c7c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.745559] env[62627]: DEBUG oslo_vmware.api [None req-a7546816-7c5c-4883-b8d1-58625e952e60 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for the task: (returnval){ [ 856.745559] env[62627]: value = "task-2194313" [ 856.745559] env[62627]: _type = "Task" [ 856.745559] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.757119] env[62627]: DEBUG oslo_vmware.api [None req-a7546816-7c5c-4883-b8d1-58625e952e60 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194313, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.842236] env[62627]: DEBUG nova.network.neutron [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] Successfully updated port: bf72690a-0358-469d-8d21-85ef459cbcb9 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 856.865266] env[62627]: DEBUG oslo_vmware.api [None req-633629a7-9318-4a0d-85ef-8b58214e0ddc tempest-VolumesAssistedSnapshotsTest-1158855689 tempest-VolumesAssistedSnapshotsTest-1158855689-project-admin] Task: {'id': task-2194311, 'name': ReconfigVM_Task, 'duration_secs': 0.647607} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.865719] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-633629a7-9318-4a0d-85ef-8b58214e0ddc tempest-VolumesAssistedSnapshotsTest-1158855689 tempest-VolumesAssistedSnapshotsTest-1158855689-project-admin] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Reconfigured VM instance instance-0000000a to attach disk [datastore1] volume-8911edc0-40ae-43db-99b6-9eec97446da5/volume-8911edc0-40ae-43db-99b6-9eec97446da5.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 856.871414] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c2b01df0-31dd-4a40-a0c7-a1b0deeb4a42 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.889892] env[62627]: DEBUG oslo_vmware.api [None req-633629a7-9318-4a0d-85ef-8b58214e0ddc tempest-VolumesAssistedSnapshotsTest-1158855689 tempest-VolumesAssistedSnapshotsTest-1158855689-project-admin] Waiting for the task: (returnval){ [ 856.889892] env[62627]: value = "task-2194314" [ 856.889892] env[62627]: _type = "Task" [ 856.889892] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.900631] env[62627]: DEBUG oslo_vmware.api [None req-633629a7-9318-4a0d-85ef-8b58214e0ddc tempest-VolumesAssistedSnapshotsTest-1158855689 tempest-VolumesAssistedSnapshotsTest-1158855689-project-admin] Task: {'id': task-2194314, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.069118] env[62627]: DEBUG nova.compute.utils [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 857.072563] env[62627]: DEBUG nova.compute.manager [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] [instance: 5f0240da-bec1-4bf3-a975-6ed3d0630a2d] Allocating IP information in the background. 
{{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 857.072740] env[62627]: DEBUG nova.network.neutron [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] [instance: 5f0240da-bec1-4bf3-a975-6ed3d0630a2d] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 857.140273] env[62627]: DEBUG nova.policy [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3701a6736dc443af803253d74ddab366', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4aaebf66b6894d2b9f1454dc785e0d9c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 857.258333] env[62627]: DEBUG oslo_vmware.api [None req-a7546816-7c5c-4883-b8d1-58625e952e60 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194313, 'name': ReconfigVM_Task, 'duration_secs': 0.199746} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.258634] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7546816-7c5c-4883-b8d1-58625e952e60 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 857.259274] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8e084754-d3be-431f-8591-7924a5bb0b57 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.267501] env[62627]: DEBUG oslo_vmware.api [None req-a7546816-7c5c-4883-b8d1-58625e952e60 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for the task: (returnval){ [ 857.267501] env[62627]: value = "task-2194315" [ 857.267501] env[62627]: _type = "Task" [ 857.267501] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.276704] env[62627]: DEBUG oslo_vmware.api [None req-a7546816-7c5c-4883-b8d1-58625e952e60 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194315, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.343457] env[62627]: DEBUG nova.network.neutron [-] [instance: 32a3b8bb-7c3a-4e30-8a50-37ce53e71882] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 857.345300] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Acquiring lock "refresh_cache-d28285ed-2ec0-43a4-bd34-82d33816fea4" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 857.345431] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Acquired lock "refresh_cache-d28285ed-2ec0-43a4-bd34-82d33816fea4" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 857.345572] env[62627]: DEBUG nova.network.neutron [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 857.401907] env[62627]: DEBUG oslo_vmware.api [None req-633629a7-9318-4a0d-85ef-8b58214e0ddc tempest-VolumesAssistedSnapshotsTest-1158855689 tempest-VolumesAssistedSnapshotsTest-1158855689-project-admin] Task: {'id': task-2194314, 'name': ReconfigVM_Task, 'duration_secs': 0.161} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.402286] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-633629a7-9318-4a0d-85ef-8b58214e0ddc tempest-VolumesAssistedSnapshotsTest-1158855689 tempest-VolumesAssistedSnapshotsTest-1158855689-project-admin] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447644', 'volume_id': '8911edc0-40ae-43db-99b6-9eec97446da5', 'name': 'volume-8911edc0-40ae-43db-99b6-9eec97446da5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'db8ddb7b-b119-415c-98d9-dc5b3d2868b7', 'attached_at': '', 'detached_at': '', 'volume_id': '8911edc0-40ae-43db-99b6-9eec97446da5', 'serial': '8911edc0-40ae-43db-99b6-9eec97446da5'} {{(pid=62627) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 857.430777] env[62627]: DEBUG oslo_vmware.rw_handles [None req-e89666a8-aabd-4570-9fb7-9db72db6fb35 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52c48df6-f4fd-0261-dc27-112dbccba361/disk-0.vmdk. 
{{(pid=62627) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 857.431957] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2012db1-d63f-4c2c-96f7-3010ebc3db06 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.451449] env[62627]: DEBUG oslo_vmware.rw_handles [None req-e89666a8-aabd-4570-9fb7-9db72db6fb35 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52c48df6-f4fd-0261-dc27-112dbccba361/disk-0.vmdk is in state: ready. {{(pid=62627) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 857.451449] env[62627]: ERROR oslo_vmware.rw_handles [None req-e89666a8-aabd-4570-9fb7-9db72db6fb35 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52c48df6-f4fd-0261-dc27-112dbccba361/disk-0.vmdk due to incomplete transfer. [ 857.451449] env[62627]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-3ecda4d5-2f87-44b7-9ef8-087ba4fdc25d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.458444] env[62627]: DEBUG oslo_vmware.rw_handles [None req-e89666a8-aabd-4570-9fb7-9db72db6fb35 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52c48df6-f4fd-0261-dc27-112dbccba361/disk-0.vmdk. {{(pid=62627) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 857.458737] env[62627]: DEBUG nova.virt.vmwareapi.images [None req-e89666a8-aabd-4570-9fb7-9db72db6fb35 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Uploaded image c4bcb9e5-6a75-4d8c-9f62-99a25e841d24 to the Glance image server {{(pid=62627) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 857.461110] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-e89666a8-aabd-4570-9fb7-9db72db6fb35 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Destroying the VM {{(pid=62627) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 857.461380] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-3e0545ce-d21c-4d1a-9ba0-b86d040761b9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.470008] env[62627]: DEBUG oslo_vmware.api [None req-e89666a8-aabd-4570-9fb7-9db72db6fb35 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Waiting for the task: (returnval){ [ 857.470008] env[62627]: value = "task-2194316" [ 857.470008] env[62627]: _type = "Task" [ 857.470008] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.482013] env[62627]: DEBUG oslo_vmware.api [None req-e89666a8-aabd-4570-9fb7-9db72db6fb35 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Task: {'id': task-2194316, 'name': Destroy_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.526773] env[62627]: DEBUG nova.compute.manager [req-2b719f1b-9ceb-4a81-a960-bf67b5bea49f req-b6d0dd15-d372-46ca-abf3-87a6a86ee2eb service nova] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] Received event network-vif-plugged-bf72690a-0358-469d-8d21-85ef459cbcb9 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 857.526773] env[62627]: DEBUG oslo_concurrency.lockutils [req-2b719f1b-9ceb-4a81-a960-bf67b5bea49f req-b6d0dd15-d372-46ca-abf3-87a6a86ee2eb service nova] Acquiring lock "d28285ed-2ec0-43a4-bd34-82d33816fea4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 857.526773] env[62627]: DEBUG oslo_concurrency.lockutils [req-2b719f1b-9ceb-4a81-a960-bf67b5bea49f req-b6d0dd15-d372-46ca-abf3-87a6a86ee2eb service nova] Lock "d28285ed-2ec0-43a4-bd34-82d33816fea4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 857.527083] env[62627]: DEBUG oslo_concurrency.lockutils [req-2b719f1b-9ceb-4a81-a960-bf67b5bea49f req-b6d0dd15-d372-46ca-abf3-87a6a86ee2eb service nova] Lock "d28285ed-2ec0-43a4-bd34-82d33816fea4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 857.528091] env[62627]: DEBUG nova.compute.manager [req-2b719f1b-9ceb-4a81-a960-bf67b5bea49f req-b6d0dd15-d372-46ca-abf3-87a6a86ee2eb service nova] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] No waiting events found dispatching network-vif-plugged-bf72690a-0358-469d-8d21-85ef459cbcb9 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 857.528348] env[62627]: WARNING nova.compute.manager [req-2b719f1b-9ceb-4a81-a960-bf67b5bea49f req-b6d0dd15-d372-46ca-abf3-87a6a86ee2eb service nova] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] Received unexpected event network-vif-plugged-bf72690a-0358-469d-8d21-85ef459cbcb9 for instance with vm_state building and task_state spawning. [ 857.531037] env[62627]: DEBUG nova.network.neutron [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] [instance: 5f0240da-bec1-4bf3-a975-6ed3d0630a2d] Successfully created port: 2807cd3b-e852-4c5c-bd4c-409c4d5c7e2f {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 857.578737] env[62627]: DEBUG nova.compute.manager [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] [instance: 5f0240da-bec1-4bf3-a975-6ed3d0630a2d] Start building block device mappings for instance.
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 857.780342] env[62627]: DEBUG oslo_vmware.api [None req-a7546816-7c5c-4883-b8d1-58625e952e60 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194315, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.853645] env[62627]: INFO nova.compute.manager [-] [instance: 32a3b8bb-7c3a-4e30-8a50-37ce53e71882] Took 1.48 seconds to deallocate network for instance. [ 857.913023] env[62627]: DEBUG nova.network.neutron [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 857.984257] env[62627]: DEBUG oslo_vmware.api [None req-e89666a8-aabd-4570-9fb7-9db72db6fb35 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Task: {'id': task-2194316, 'name': Destroy_Task, 'duration_secs': 0.440117} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.984714] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-e89666a8-aabd-4570-9fb7-9db72db6fb35 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Destroyed the VM [ 857.984975] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e89666a8-aabd-4570-9fb7-9db72db6fb35 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Deleting Snapshot of the VM instance {{(pid=62627) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 857.985272] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-cd7a8dc1-d86f-4230-8487-d132588a39e3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.994644] env[62627]: DEBUG oslo_vmware.api [None req-e89666a8-aabd-4570-9fb7-9db72db6fb35 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Waiting for the task: (returnval){ [ 857.994644] env[62627]: value = "task-2194317" [ 857.994644] env[62627]: _type = "Task" [ 857.994644] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.004814] env[62627]: DEBUG oslo_vmware.api [None req-e89666a8-aabd-4570-9fb7-9db72db6fb35 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Task: {'id': task-2194317, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.198281] env[62627]: DEBUG nova.network.neutron [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] Updating instance_info_cache with network_info: [{"id": "bf72690a-0358-469d-8d21-85ef459cbcb9", "address": "fa:16:3e:16:7f:81", "network": {"id": "d75e7b13-f4ab-4626-9799-4970d691f831", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-781733128-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca351451d2344fc1872f6faf89412354", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c5f8c7b-74a0-43f7-87b4-3c38dd26ac3e", "external-id": "nsx-vlan-transportzone-988", "segmentation_id": 988, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbf72690a-03", "ovs_interfaceid": "bf72690a-0358-469d-8d21-85ef459cbcb9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 858.209237] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca24da3d-b62f-45f7-b69c-cd18c43f1085 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.218812] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a652513-d467-4ab5-9dd2-80fb58eb9183 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.257223] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcedbe3b-cbcf-4718-925a-5b16d437580a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.264048] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a4af7d9-2289-4d8f-b703-34afdc782656 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.282423] env[62627]: DEBUG nova.compute.provider_tree [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 858.290206] env[62627]: DEBUG oslo_vmware.api [None req-a7546816-7c5c-4883-b8d1-58625e952e60 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194315, 'name': PowerOnVM_Task, 'duration_secs': 0.655687} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.290553] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7546816-7c5c-4883-b8d1-58625e952e60 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 858.293960] env[62627]: DEBUG nova.compute.manager [None req-a7546816-7c5c-4883-b8d1-58625e952e60 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 858.295146] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6389346b-b170-4735-984e-72eaf3312ab6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.361152] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9ee09dc4-abf3-444a-b97a-c68df2d6168f tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 858.465513] env[62627]: DEBUG nova.objects.instance [None req-633629a7-9318-4a0d-85ef-8b58214e0ddc tempest-VolumesAssistedSnapshotsTest-1158855689 tempest-VolumesAssistedSnapshotsTest-1158855689-project-admin] Lazy-loading 'flavor' on Instance uuid db8ddb7b-b119-415c-98d9-dc5b3d2868b7 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 858.505111] env[62627]: DEBUG oslo_vmware.api [None req-e89666a8-aabd-4570-9fb7-9db72db6fb35 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Task: {'id': task-2194317, 'name': RemoveSnapshot_Task} progress is 29%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.594269] env[62627]: DEBUG nova.compute.manager [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] [instance: 5f0240da-bec1-4bf3-a975-6ed3d0630a2d] Start spawning the instance on the hypervisor. 
{{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 858.620385] env[62627]: DEBUG nova.virt.hardware [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 858.620722] env[62627]: DEBUG nova.virt.hardware [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 858.621171] env[62627]: DEBUG nova.virt.hardware [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 858.621251] env[62627]: DEBUG nova.virt.hardware [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 858.621392] env[62627]: DEBUG nova.virt.hardware [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 858.621621] env[62627]: DEBUG nova.virt.hardware [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 858.621897] env[62627]: DEBUG nova.virt.hardware [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 858.622136] env[62627]: DEBUG nova.virt.hardware [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 858.622368] 
env[62627]: DEBUG nova.virt.hardware [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 858.622614] env[62627]: DEBUG nova.virt.hardware [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 858.622859] env[62627]: DEBUG nova.virt.hardware [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 858.623936] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0cfba8b-4ac1-4c09-8221-f407d422ced0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.633670] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c7daa15-8c58-4574-a8f5-57f0c96f04ab {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.701143] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Releasing lock "refresh_cache-d28285ed-2ec0-43a4-bd34-82d33816fea4" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 858.701488] env[62627]: DEBUG nova.compute.manager [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] Instance network_info: |[{"id": "bf72690a-0358-469d-8d21-85ef459cbcb9", "address": "fa:16:3e:16:7f:81", "network": {"id": "d75e7b13-f4ab-4626-9799-4970d691f831", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-781733128-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca351451d2344fc1872f6faf89412354", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c5f8c7b-74a0-43f7-87b4-3c38dd26ac3e", "external-id": "nsx-vlan-transportzone-988", "segmentation_id": 988, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbf72690a-03", "ovs_interfaceid": "bf72690a-0358-469d-8d21-85ef459cbcb9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 858.702056] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None 
req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:16:7f:81', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3c5f8c7b-74a0-43f7-87b4-3c38dd26ac3e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bf72690a-0358-469d-8d21-85ef459cbcb9', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 858.710092] env[62627]: DEBUG oslo.service.loopingcall [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 858.710325] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 858.710641] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-94fbfa45-3a26-4e21-8744-a81854b28cd5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.733105] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 858.733105] env[62627]: value = "task-2194318" [ 858.733105] env[62627]: _type = "Task" [ 858.733105] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.741607] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194318, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.787031] env[62627]: DEBUG nova.scheduler.client.report [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 858.971144] env[62627]: DEBUG oslo_concurrency.lockutils [None req-633629a7-9318-4a0d-85ef-8b58214e0ddc tempest-VolumesAssistedSnapshotsTest-1158855689 tempest-VolumesAssistedSnapshotsTest-1158855689-project-admin] Lock "db8ddb7b-b119-415c-98d9-dc5b3d2868b7" "released" by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" :: held 7.840s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 859.005318] env[62627]: DEBUG oslo_vmware.api [None req-e89666a8-aabd-4570-9fb7-9db72db6fb35 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Task: {'id': task-2194317, 'name': RemoveSnapshot_Task, 'duration_secs': 0.922075} completed successfully.
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.005687] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e89666a8-aabd-4570-9fb7-9db72db6fb35 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Deleted Snapshot of the VM instance {{(pid=62627) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 859.005840] env[62627]: INFO nova.compute.manager [None req-e89666a8-aabd-4570-9fb7-9db72db6fb35 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Took 18.98 seconds to snapshot the instance on the hypervisor. [ 859.245012] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194318, 'name': CreateVM_Task, 'duration_secs': 0.437589} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.245207] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 859.245989] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 859.246174] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 859.246527] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 859.246783] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-576c8977-6825-4bcb-b111-b1416f69047e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.254065] env[62627]: DEBUG oslo_vmware.api [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Waiting for the task: (returnval){ [ 859.254065] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5299ece1-4793-b28b-40e1-7f1bba084744" [ 859.254065] env[62627]: _type = "Task" [ 859.254065] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.263380] env[62627]: DEBUG oslo_vmware.api [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5299ece1-4793-b28b-40e1-7f1bba084744, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.292058] env[62627]: DEBUG oslo_concurrency.lockutils [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.734s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 859.292581] env[62627]: DEBUG nova.compute.manager [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 859.295590] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7ed47f54-b821-49b1-85ff-7988a25704ef tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.204s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 859.295820] env[62627]: DEBUG nova.objects.instance [None req-7ed47f54-b821-49b1-85ff-7988a25704ef tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lazy-loading 'resources' on Instance uuid ea289de9-9b2d-4756-a9a5-0e3dfe8db772 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 859.328290] env[62627]: DEBUG nova.network.neutron [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] [instance: 5f0240da-bec1-4bf3-a975-6ed3d0630a2d] Successfully updated port: 2807cd3b-e852-4c5c-bd4c-409c4d5c7e2f {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 859.574879] env[62627]: INFO nova.compute.manager [None req-8081bc04-f014-4cc7-a3be-c15165fd2dd3 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Unrescuing [ 859.575198] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8081bc04-f014-4cc7-a3be-c15165fd2dd3 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Acquiring lock "refresh_cache-241311b0-ba81-41d1-a9b6-cd40d04dbe3c" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 859.575914] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8081bc04-f014-4cc7-a3be-c15165fd2dd3 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Acquired lock "refresh_cache-241311b0-ba81-41d1-a9b6-cd40d04dbe3c" {{(pid=62627) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 859.575914] env[62627]: DEBUG nova.network.neutron [None req-8081bc04-f014-4cc7-a3be-c15165fd2dd3 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 859.628341] env[62627]: DEBUG nova.compute.manager [req-04118363-0368-456c-97cd-feecbcdd733e req-3def2994-a680-4e05-bf88-8b3b03a75eeb service nova] [instance: 32a3b8bb-7c3a-4e30-8a50-37ce53e71882] Received event network-vif-deleted-499d7862-fc13-4d2b-8c15-6eab541a13f1 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 859.628611] env[62627]: DEBUG nova.compute.manager [req-04118363-0368-456c-97cd-feecbcdd733e req-3def2994-a680-4e05-bf88-8b3b03a75eeb service nova] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] Received event network-changed-bf72690a-0358-469d-8d21-85ef459cbcb9 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 859.628735] env[62627]: DEBUG nova.compute.manager [req-04118363-0368-456c-97cd-feecbcdd733e req-3def2994-a680-4e05-bf88-8b3b03a75eeb service nova] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] Refreshing instance network info cache due to event network-changed-bf72690a-0358-469d-8d21-85ef459cbcb9. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 859.628949] env[62627]: DEBUG oslo_concurrency.lockutils [req-04118363-0368-456c-97cd-feecbcdd733e req-3def2994-a680-4e05-bf88-8b3b03a75eeb service nova] Acquiring lock "refresh_cache-d28285ed-2ec0-43a4-bd34-82d33816fea4" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 859.629063] env[62627]: DEBUG oslo_concurrency.lockutils [req-04118363-0368-456c-97cd-feecbcdd733e req-3def2994-a680-4e05-bf88-8b3b03a75eeb service nova] Acquired lock "refresh_cache-d28285ed-2ec0-43a4-bd34-82d33816fea4" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 859.629228] env[62627]: DEBUG nova.network.neutron [req-04118363-0368-456c-97cd-feecbcdd733e req-3def2994-a680-4e05-bf88-8b3b03a75eeb service nova] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] Refreshing network info cache for port bf72690a-0358-469d-8d21-85ef459cbcb9 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 859.766117] env[62627]: DEBUG oslo_vmware.api [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5299ece1-4793-b28b-40e1-7f1bba084744, 'name': SearchDatastore_Task, 'duration_secs': 0.012461} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.766301] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 859.766529] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 859.766765] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 859.766909] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 859.767106] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 859.767369] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-77ad6617-0a87-4c2b-8f02-fa786608edf5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.778228] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 859.778414] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 859.779174] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-deef9328-b485-4aef-aaf0-564744a5ed9e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.786029] env[62627]: DEBUG oslo_vmware.api [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Waiting for the task: (returnval){ [ 859.786029] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]529c3824-252c-677f-2fd0-ae0ee14f2c56" [ 859.786029] env[62627]: _type = "Task" [ 859.786029] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.793779] env[62627]: DEBUG oslo_vmware.api [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]529c3824-252c-677f-2fd0-ae0ee14f2c56, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.801475] env[62627]: DEBUG nova.compute.utils [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 859.803016] env[62627]: DEBUG nova.compute.manager [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] Allocating IP information in the background. 
{{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 859.803206] env[62627]: DEBUG nova.network.neutron [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 859.831660] env[62627]: DEBUG oslo_concurrency.lockutils [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Acquiring lock "refresh_cache-5f0240da-bec1-4bf3-a975-6ed3d0630a2d" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 859.831891] env[62627]: DEBUG oslo_concurrency.lockutils [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Acquired lock "refresh_cache-5f0240da-bec1-4bf3-a975-6ed3d0630a2d" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 859.832040] env[62627]: DEBUG nova.network.neutron [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] [instance: 5f0240da-bec1-4bf3-a975-6ed3d0630a2d] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 859.846805] env[62627]: DEBUG nova.policy [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4d067f5bbaa846048d3e9e3d857310a9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6ad7b328aba44b51aec9455bada6678c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 860.108527] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ce13407a-8c0c-42a9-a0b2-80db343afde3 tempest-VolumesAssistedSnapshotsTest-1158855689 tempest-VolumesAssistedSnapshotsTest-1158855689-project-admin] Acquiring lock "db8ddb7b-b119-415c-98d9-dc5b3d2868b7" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 860.108674] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ce13407a-8c0c-42a9-a0b2-80db343afde3 tempest-VolumesAssistedSnapshotsTest-1158855689 tempest-VolumesAssistedSnapshotsTest-1158855689-project-admin] Lock "db8ddb7b-b119-415c-98d9-dc5b3d2868b7" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 860.299419] env[62627]: DEBUG oslo_vmware.api [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]529c3824-252c-677f-2fd0-ae0ee14f2c56, 'name': SearchDatastore_Task, 
'duration_secs': 0.029886} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.302134] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8964fde2-248e-413f-975b-70a63d1b7937 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.306798] env[62627]: DEBUG nova.compute.manager [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 860.312728] env[62627]: DEBUG oslo_vmware.api [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Waiting for the task: (returnval){ [ 860.312728] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]525986cd-f2f1-2dc1-a25d-c4500fc8362e" [ 860.312728] env[62627]: _type = "Task" [ 860.312728] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.326447] env[62627]: DEBUG oslo_vmware.api [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]525986cd-f2f1-2dc1-a25d-c4500fc8362e, 'name': SearchDatastore_Task} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.329754] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 860.330018] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore1] d28285ed-2ec0-43a4-bd34-82d33816fea4/d28285ed-2ec0-43a4-bd34-82d33816fea4.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 860.330536] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8f780e6e-ad50-4448-af3c-cbf5ad97fceb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.339103] env[62627]: DEBUG oslo_vmware.api [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Waiting for the task: (returnval){ [ 860.339103] env[62627]: value = "task-2194319" [ 860.339103] env[62627]: _type = "Task" [ 860.339103] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.347917] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30a8afa8-9603-4e71-a461-2575b72bacf3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.358488] env[62627]: DEBUG oslo_vmware.api [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Task: {'id': task-2194319, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.361226] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24aa9148-40e7-4070-8507-388611038b22 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.403259] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25b22835-41a6-4bc6-b32d-a5e582d39ccd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.412825] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ced1d816-fee9-4fd0-8700-6e64a0074233 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.428586] env[62627]: DEBUG nova.compute.provider_tree [None req-7ed47f54-b821-49b1-85ff-7988a25704ef tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 860.436830] env[62627]: DEBUG oslo_concurrency.lockutils [None req-14e13a18-54b9-4ee7-8a29-56a51d628a23 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Acquiring lock "8af5ca34-f9c4-4b68-b0f7-7163f388b778" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 860.437512] env[62627]: DEBUG oslo_concurrency.lockutils [None req-14e13a18-54b9-4ee7-8a29-56a51d628a23 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Lock "8af5ca34-f9c4-4b68-b0f7-7163f388b778" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 860.437512] env[62627]: DEBUG oslo_concurrency.lockutils [None req-14e13a18-54b9-4ee7-8a29-56a51d628a23 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Acquiring lock "8af5ca34-f9c4-4b68-b0f7-7163f388b778-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 860.437721] env[62627]: DEBUG oslo_concurrency.lockutils [None req-14e13a18-54b9-4ee7-8a29-56a51d628a23 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Lock 
"8af5ca34-f9c4-4b68-b0f7-7163f388b778-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 860.440087] env[62627]: DEBUG oslo_concurrency.lockutils [None req-14e13a18-54b9-4ee7-8a29-56a51d628a23 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Lock "8af5ca34-f9c4-4b68-b0f7-7163f388b778-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 860.440087] env[62627]: INFO nova.compute.manager [None req-14e13a18-54b9-4ee7-8a29-56a51d628a23 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Terminating instance [ 860.447347] env[62627]: DEBUG nova.network.neutron [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] [instance: 5f0240da-bec1-4bf3-a975-6ed3d0630a2d] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 860.612274] env[62627]: INFO nova.compute.manager [None req-ce13407a-8c0c-42a9-a0b2-80db343afde3 tempest-VolumesAssistedSnapshotsTest-1158855689 tempest-VolumesAssistedSnapshotsTest-1158855689-project-admin] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Detaching volume 8911edc0-40ae-43db-99b6-9eec97446da5 [ 860.658949] env[62627]: INFO nova.virt.block_device [None req-ce13407a-8c0c-42a9-a0b2-80db343afde3 tempest-VolumesAssistedSnapshotsTest-1158855689 tempest-VolumesAssistedSnapshotsTest-1158855689-project-admin] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Attempting to driver detach volume 8911edc0-40ae-43db-99b6-9eec97446da5 from mountpoint /dev/sdb [ 860.659763] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-ce13407a-8c0c-42a9-a0b2-80db343afde3 tempest-VolumesAssistedSnapshotsTest-1158855689 tempest-VolumesAssistedSnapshotsTest-1158855689-project-admin] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Volume detach. 
Driver type: vmdk {{(pid=62627) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 860.659763] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-ce13407a-8c0c-42a9-a0b2-80db343afde3 tempest-VolumesAssistedSnapshotsTest-1158855689 tempest-VolumesAssistedSnapshotsTest-1158855689-project-admin] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447644', 'volume_id': '8911edc0-40ae-43db-99b6-9eec97446da5', 'name': 'volume-8911edc0-40ae-43db-99b6-9eec97446da5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'db8ddb7b-b119-415c-98d9-dc5b3d2868b7', 'attached_at': '', 'detached_at': '', 'volume_id': '8911edc0-40ae-43db-99b6-9eec97446da5', 'serial': '8911edc0-40ae-43db-99b6-9eec97446da5'} {{(pid=62627) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 860.662445] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f677c09c-0992-4434-b4ae-2de14bbff7ff {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.703355] env[62627]: DEBUG nova.network.neutron [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] Successfully created port: f91eb28c-7881-4e2c-b51e-7c0f169652d1 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 860.711022] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef360626-3748-47f3-9506-b1a8112afbf3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.727263] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9dc7593-0d29-46ab-b3c9-6b54c4fc1d9f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.748093] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fed5fdcb-167e-482c-9db0-25330bc4edc8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.765413] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-ce13407a-8c0c-42a9-a0b2-80db343afde3 tempest-VolumesAssistedSnapshotsTest-1158855689 tempest-VolumesAssistedSnapshotsTest-1158855689-project-admin] The volume has not been displaced from its original location: [datastore1] volume-8911edc0-40ae-43db-99b6-9eec97446da5/volume-8911edc0-40ae-43db-99b6-9eec97446da5.vmdk. No consolidation needed. 
{{(pid=62627) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 860.770758] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-ce13407a-8c0c-42a9-a0b2-80db343afde3 tempest-VolumesAssistedSnapshotsTest-1158855689 tempest-VolumesAssistedSnapshotsTest-1158855689-project-admin] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Reconfiguring VM instance instance-0000000a to detach disk 2001 {{(pid=62627) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 860.771194] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-03826fad-7a8c-4d1b-aa8b-1dd8593feece {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.791704] env[62627]: DEBUG oslo_vmware.api [None req-ce13407a-8c0c-42a9-a0b2-80db343afde3 tempest-VolumesAssistedSnapshotsTest-1158855689 tempest-VolumesAssistedSnapshotsTest-1158855689-project-admin] Waiting for the task: (returnval){ [ 860.791704] env[62627]: value = "task-2194320" [ 860.791704] env[62627]: _type = "Task" [ 860.791704] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.804298] env[62627]: DEBUG oslo_vmware.api [None req-ce13407a-8c0c-42a9-a0b2-80db343afde3 tempest-VolumesAssistedSnapshotsTest-1158855689 tempest-VolumesAssistedSnapshotsTest-1158855689-project-admin] Task: {'id': task-2194320, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.855302] env[62627]: DEBUG oslo_vmware.api [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Task: {'id': task-2194319, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.873947] env[62627]: DEBUG nova.network.neutron [None req-8081bc04-f014-4cc7-a3be-c15165fd2dd3 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Updating instance_info_cache with network_info: [{"id": "ddd3e21b-1460-4da2-bfcc-258303d2719e", "address": "fa:16:3e:e3:64:4d", "network": {"id": "0b8ce5a6-9abc-4473-b558-a4e9e5c7d741", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1874234589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d41b3fa69e2473fbea79b0ec52108fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc9714ff-7109-4ea1-9435-b2b3fbdb9e81", "external-id": "nsx-vlan-transportzone-887", "segmentation_id": 887, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapddd3e21b-14", "ovs_interfaceid": "ddd3e21b-1460-4da2-bfcc-258303d2719e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 860.879467] env[62627]: DEBUG nova.network.neutron [req-04118363-0368-456c-97cd-feecbcdd733e req-3def2994-a680-4e05-bf88-8b3b03a75eeb service nova] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] Updated VIF entry in instance network info cache for port bf72690a-0358-469d-8d21-85ef459cbcb9. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 860.879947] env[62627]: DEBUG nova.network.neutron [req-04118363-0368-456c-97cd-feecbcdd733e req-3def2994-a680-4e05-bf88-8b3b03a75eeb service nova] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] Updating instance_info_cache with network_info: [{"id": "bf72690a-0358-469d-8d21-85ef459cbcb9", "address": "fa:16:3e:16:7f:81", "network": {"id": "d75e7b13-f4ab-4626-9799-4970d691f831", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-781733128-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca351451d2344fc1872f6faf89412354", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c5f8c7b-74a0-43f7-87b4-3c38dd26ac3e", "external-id": "nsx-vlan-transportzone-988", "segmentation_id": 988, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbf72690a-03", "ovs_interfaceid": "bf72690a-0358-469d-8d21-85ef459cbcb9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 860.936078] env[62627]: DEBUG nova.scheduler.client.report [None req-7ed47f54-b821-49b1-85ff-7988a25704ef tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 860.944247] env[62627]: DEBUG nova.compute.manager [None req-14e13a18-54b9-4ee7-8a29-56a51d628a23 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Start destroying the instance on the hypervisor. 
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 860.944576] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-14e13a18-54b9-4ee7-8a29-56a51d628a23 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 860.945837] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e284aa4-599a-47ae-a805-659bc6036862 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.957325] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-14e13a18-54b9-4ee7-8a29-56a51d628a23 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 860.958501] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-836e62a0-258e-4a76-a803-059377295c3f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.969467] env[62627]: DEBUG oslo_vmware.api [None req-14e13a18-54b9-4ee7-8a29-56a51d628a23 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Waiting for the task: (returnval){ [ 860.969467] env[62627]: value = "task-2194321" [ 860.969467] env[62627]: _type = "Task" [ 860.969467] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.970633] env[62627]: DEBUG nova.network.neutron [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] [instance: 5f0240da-bec1-4bf3-a975-6ed3d0630a2d] Updating instance_info_cache with network_info: [{"id": "2807cd3b-e852-4c5c-bd4c-409c4d5c7e2f", "address": "fa:16:3e:17:96:86", "network": {"id": "bd876f46-2bdf-4196-b718-c36f7e15f150", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.39", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ccb0cafa1a904d578ec4998b386bebf7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae1d6a8-cbba-4bbf-af10-ba5467340475", "external-id": "nsx-vlan-transportzone-271", "segmentation_id": 271, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2807cd3b-e8", "ovs_interfaceid": "2807cd3b-e852-4c5c-bd4c-409c4d5c7e2f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 860.982992] env[62627]: DEBUG oslo_vmware.api [None req-14e13a18-54b9-4ee7-8a29-56a51d628a23 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Task: {'id': 
task-2194321, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.302707] env[62627]: DEBUG oslo_vmware.api [None req-ce13407a-8c0c-42a9-a0b2-80db343afde3 tempest-VolumesAssistedSnapshotsTest-1158855689 tempest-VolumesAssistedSnapshotsTest-1158855689-project-admin] Task: {'id': task-2194320, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.320095] env[62627]: DEBUG nova.compute.manager [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] Start spawning the instance on the hypervisor. {{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 861.325856] env[62627]: DEBUG oslo_concurrency.lockutils [None req-647f694d-9af8-4055-9905-bd3791563680 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquiring lock "interface-a12855e2-1544-4e85-a7ff-c026fec861b6-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 861.326328] env[62627]: DEBUG oslo_concurrency.lockutils [None req-647f694d-9af8-4055-9905-bd3791563680 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lock "interface-a12855e2-1544-4e85-a7ff-c026fec861b6-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 861.326460] env[62627]: DEBUG nova.objects.instance [None req-647f694d-9af8-4055-9905-bd3791563680 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lazy-loading 'flavor' on Instance uuid a12855e2-1544-4e85-a7ff-c026fec861b6 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 861.348724] env[62627]: DEBUG nova.virt.hardware [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 861.348975] env[62627]: DEBUG nova.virt.hardware [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:352}} [ 861.349152] env[62627]: DEBUG nova.virt.hardware [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 861.349335] env[62627]: DEBUG nova.virt.hardware [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 861.349478] env[62627]: DEBUG nova.virt.hardware [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 861.349623] env[62627]: DEBUG nova.virt.hardware [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 861.349823] env[62627]: DEBUG nova.virt.hardware [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 861.350452] env[62627]: DEBUG nova.virt.hardware [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 861.350452] env[62627]: DEBUG nova.virt.hardware [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 861.350452] env[62627]: DEBUG nova.virt.hardware [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 861.350632] env[62627]: DEBUG nova.virt.hardware [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 861.354719] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3058e3fa-daef-4030-b4a5-87634981b5cf {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.366843] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0feca3c-40bb-4630-bc9b-3db75ef5f42f {{(pid=62627) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.371785] env[62627]: DEBUG oslo_vmware.api [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Task: {'id': task-2194319, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.918258} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.372115] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore1] d28285ed-2ec0-43a4-bd34-82d33816fea4/d28285ed-2ec0-43a4-bd34-82d33816fea4.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 861.372333] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 861.373324] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7fdb0769-3c4a-4955-9342-45e3808443ec {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.375718] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8081bc04-f014-4cc7-a3be-c15165fd2dd3 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Releasing lock "refresh_cache-241311b0-ba81-41d1-a9b6-cd40d04dbe3c" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 861.376331] env[62627]: DEBUG nova.objects.instance [None req-8081bc04-f014-4cc7-a3be-c15165fd2dd3 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Lazy-loading 'flavor' on Instance uuid 241311b0-ba81-41d1-a9b6-cd40d04dbe3c {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 861.383150] env[62627]: DEBUG oslo_vmware.api [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Waiting for the task: (returnval){ [ 861.383150] env[62627]: value = "task-2194322" [ 861.383150] env[62627]: _type = "Task" [ 861.383150] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.391616] env[62627]: DEBUG oslo_concurrency.lockutils [req-04118363-0368-456c-97cd-feecbcdd733e req-3def2994-a680-4e05-bf88-8b3b03a75eeb service nova] Releasing lock "refresh_cache-d28285ed-2ec0-43a4-bd34-82d33816fea4" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 861.391616] env[62627]: DEBUG nova.compute.manager [req-04118363-0368-456c-97cd-feecbcdd733e req-3def2994-a680-4e05-bf88-8b3b03a75eeb service nova] [instance: 5f0240da-bec1-4bf3-a975-6ed3d0630a2d] Received event network-vif-plugged-2807cd3b-e852-4c5c-bd4c-409c4d5c7e2f {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 861.391780] env[62627]: DEBUG oslo_concurrency.lockutils [req-04118363-0368-456c-97cd-feecbcdd733e req-3def2994-a680-4e05-bf88-8b3b03a75eeb service nova] Acquiring lock "5f0240da-bec1-4bf3-a975-6ed3d0630a2d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 861.392044] env[62627]: DEBUG oslo_concurrency.lockutils [req-04118363-0368-456c-97cd-feecbcdd733e req-3def2994-a680-4e05-bf88-8b3b03a75eeb service nova] Lock "5f0240da-bec1-4bf3-a975-6ed3d0630a2d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 861.392282] env[62627]: DEBUG oslo_concurrency.lockutils [req-04118363-0368-456c-97cd-feecbcdd733e req-3def2994-a680-4e05-bf88-8b3b03a75eeb service nova] Lock "5f0240da-bec1-4bf3-a975-6ed3d0630a2d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 861.393030] env[62627]: DEBUG nova.compute.manager [req-04118363-0368-456c-97cd-feecbcdd733e req-3def2994-a680-4e05-bf88-8b3b03a75eeb service nova] [instance: 5f0240da-bec1-4bf3-a975-6ed3d0630a2d] No waiting events found dispatching network-vif-plugged-2807cd3b-e852-4c5c-bd4c-409c4d5c7e2f {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 861.393030] env[62627]: WARNING nova.compute.manager [req-04118363-0368-456c-97cd-feecbcdd733e req-3def2994-a680-4e05-bf88-8b3b03a75eeb service nova] [instance: 5f0240da-bec1-4bf3-a975-6ed3d0630a2d] Received unexpected event network-vif-plugged-2807cd3b-e852-4c5c-bd4c-409c4d5c7e2f for instance with vm_state building and task_state spawning. [ 861.393030] env[62627]: DEBUG nova.compute.manager [req-04118363-0368-456c-97cd-feecbcdd733e req-3def2994-a680-4e05-bf88-8b3b03a75eeb service nova] [instance: 5f0240da-bec1-4bf3-a975-6ed3d0630a2d] Received event network-changed-2807cd3b-e852-4c5c-bd4c-409c4d5c7e2f {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 861.393030] env[62627]: DEBUG nova.compute.manager [req-04118363-0368-456c-97cd-feecbcdd733e req-3def2994-a680-4e05-bf88-8b3b03a75eeb service nova] [instance: 5f0240da-bec1-4bf3-a975-6ed3d0630a2d] Refreshing instance network info cache due to event network-changed-2807cd3b-e852-4c5c-bd4c-409c4d5c7e2f. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 861.393255] env[62627]: DEBUG oslo_concurrency.lockutils [req-04118363-0368-456c-97cd-feecbcdd733e req-3def2994-a680-4e05-bf88-8b3b03a75eeb service nova] Acquiring lock "refresh_cache-5f0240da-bec1-4bf3-a975-6ed3d0630a2d" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 861.405554] env[62627]: DEBUG oslo_vmware.api [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Task: {'id': task-2194322, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.442940] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7ed47f54-b821-49b1-85ff-7988a25704ef tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.148s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 861.447187] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.245s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 861.448282] env[62627]: INFO nova.compute.claims [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 861.471849] env[62627]: INFO nova.scheduler.client.report [None req-7ed47f54-b821-49b1-85ff-7988a25704ef tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Deleted allocations for instance ea289de9-9b2d-4756-a9a5-0e3dfe8db772 [ 861.479475] env[62627]: DEBUG oslo_concurrency.lockutils [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Releasing lock "refresh_cache-5f0240da-bec1-4bf3-a975-6ed3d0630a2d" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 861.479767] env[62627]: DEBUG nova.compute.manager [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] [instance: 5f0240da-bec1-4bf3-a975-6ed3d0630a2d] Instance network_info: |[{"id": "2807cd3b-e852-4c5c-bd4c-409c4d5c7e2f", "address": "fa:16:3e:17:96:86", "network": {"id": "bd876f46-2bdf-4196-b718-c36f7e15f150", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.39", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ccb0cafa1a904d578ec4998b386bebf7", "mtu": 8950, "physical_network": 
"default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae1d6a8-cbba-4bbf-af10-ba5467340475", "external-id": "nsx-vlan-transportzone-271", "segmentation_id": 271, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2807cd3b-e8", "ovs_interfaceid": "2807cd3b-e852-4c5c-bd4c-409c4d5c7e2f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 861.480273] env[62627]: DEBUG oslo_concurrency.lockutils [req-04118363-0368-456c-97cd-feecbcdd733e req-3def2994-a680-4e05-bf88-8b3b03a75eeb service nova] Acquired lock "refresh_cache-5f0240da-bec1-4bf3-a975-6ed3d0630a2d" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 861.480458] env[62627]: DEBUG nova.network.neutron [req-04118363-0368-456c-97cd-feecbcdd733e req-3def2994-a680-4e05-bf88-8b3b03a75eeb service nova] [instance: 5f0240da-bec1-4bf3-a975-6ed3d0630a2d] Refreshing network info cache for port 2807cd3b-e852-4c5c-bd4c-409c4d5c7e2f {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 861.481459] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] [instance: 5f0240da-bec1-4bf3-a975-6ed3d0630a2d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:17:96:86', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cae1d6a8-cbba-4bbf-af10-ba5467340475', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2807cd3b-e852-4c5c-bd4c-409c4d5c7e2f', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 861.489488] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Creating folder: Project (4aaebf66b6894d2b9f1454dc785e0d9c). Parent ref: group-v447541. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 861.490400] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-35c7141c-c5d0-4402-ab6a-2c3e88f32c44 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.495833] env[62627]: DEBUG oslo_vmware.api [None req-14e13a18-54b9-4ee7-8a29-56a51d628a23 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Task: {'id': task-2194321, 'name': PowerOffVM_Task, 'duration_secs': 0.46096} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.497087] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-14e13a18-54b9-4ee7-8a29-56a51d628a23 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 861.497268] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-14e13a18-54b9-4ee7-8a29-56a51d628a23 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 861.497519] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-90aaf1cd-587e-4434-b3bb-0e6a4f6686c8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.502079] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Created folder: Project (4aaebf66b6894d2b9f1454dc785e0d9c) in parent group-v447541. [ 861.502274] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Creating folder: Instances. Parent ref: group-v447648. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 861.502511] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6d6bd232-a75f-4779-b03f-4a15a65c6695 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.514770] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Created folder: Instances in parent group-v447648. [ 861.514937] env[62627]: DEBUG oslo.service.loopingcall [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 861.515167] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5f0240da-bec1-4bf3-a975-6ed3d0630a2d] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 861.515685] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dd27e5e1-452c-4b6c-bead-727442160bb8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.536512] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 861.536512] env[62627]: value = "task-2194326" [ 861.536512] env[62627]: _type = "Task" [ 861.536512] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.544928] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194326, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.589278] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-14e13a18-54b9-4ee7-8a29-56a51d628a23 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 861.589465] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-14e13a18-54b9-4ee7-8a29-56a51d628a23 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Deleting contents of the VM from datastore datastore1 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 861.589530] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-14e13a18-54b9-4ee7-8a29-56a51d628a23 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Deleting the datastore file [datastore1] 8af5ca34-f9c4-4b68-b0f7-7163f388b778 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 861.589787] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2dc299e1-d795-4436-a504-385e6bcb51b8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.597773] env[62627]: DEBUG oslo_vmware.api [None req-14e13a18-54b9-4ee7-8a29-56a51d628a23 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Waiting for the task: (returnval){ [ 861.597773] env[62627]: value = "task-2194327" [ 861.597773] env[62627]: _type = "Task" [ 861.597773] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.607324] env[62627]: DEBUG oslo_vmware.api [None req-14e13a18-54b9-4ee7-8a29-56a51d628a23 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Task: {'id': task-2194327, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.802538] env[62627]: DEBUG oslo_vmware.api [None req-ce13407a-8c0c-42a9-a0b2-80db343afde3 tempest-VolumesAssistedSnapshotsTest-1158855689 tempest-VolumesAssistedSnapshotsTest-1158855689-project-admin] Task: {'id': task-2194320, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.881661] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b1a6726-6eea-4a67-bae0-52b8fbe741ba {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.909166] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-8081bc04-f014-4cc7-a3be-c15165fd2dd3 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 861.914811] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-323cfcce-50c6-45a1-af6c-63b3698e22c0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.924766] env[62627]: DEBUG oslo_vmware.api [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Task: {'id': task-2194322, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074446} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.926605] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 861.927197] env[62627]: DEBUG oslo_vmware.api [None req-8081bc04-f014-4cc7-a3be-c15165fd2dd3 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for the task: (returnval){ [ 861.927197] env[62627]: value = "task-2194328" [ 861.927197] env[62627]: _type = "Task" [ 861.927197] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.928360] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e1b7407-f481-43aa-8c5f-3d3b9c67ad84 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.941400] env[62627]: DEBUG oslo_vmware.api [None req-8081bc04-f014-4cc7-a3be-c15165fd2dd3 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194328, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.958845] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] Reconfiguring VM instance instance-00000025 to attach disk [datastore1] d28285ed-2ec0-43a4-bd34-82d33816fea4/d28285ed-2ec0-43a4-bd34-82d33816fea4.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 861.961477] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c169d850-9360-4d49-bd47-d274b905101a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.976123] env[62627]: DEBUG nova.objects.instance [None req-647f694d-9af8-4055-9905-bd3791563680 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lazy-loading 'pci_requests' on Instance uuid a12855e2-1544-4e85-a7ff-c026fec861b6 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 861.983918] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7ed47f54-b821-49b1-85ff-7988a25704ef tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "ea289de9-9b2d-4756-a9a5-0e3dfe8db772" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 38.387s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 861.985701] env[62627]: DEBUG oslo_vmware.api [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Waiting for the task: (returnval){ [ 861.985701] env[62627]: value = "task-2194329" [ 861.985701] env[62627]: _type = "Task" [ 861.985701] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.997268] env[62627]: DEBUG oslo_vmware.api [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Task: {'id': task-2194329, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.048332] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194326, 'name': CreateVM_Task, 'duration_secs': 0.424955} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.048332] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5f0240da-bec1-4bf3-a975-6ed3d0630a2d] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 862.048332] env[62627]: DEBUG oslo_concurrency.lockutils [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 862.048332] env[62627]: DEBUG oslo_concurrency.lockutils [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 862.048332] env[62627]: DEBUG oslo_concurrency.lockutils [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 862.048332] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4f701f26-0bab-47ae-8b2b-cfc6237f5fb9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.053885] env[62627]: DEBUG oslo_vmware.api [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Waiting for the task: (returnval){ [ 862.053885] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5255896e-0a00-6e67-f3de-73c8fe2fd32c" [ 862.053885] env[62627]: _type = "Task" [ 862.053885] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.062434] env[62627]: DEBUG oslo_vmware.api [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5255896e-0a00-6e67-f3de-73c8fe2fd32c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.108637] env[62627]: DEBUG oslo_vmware.api [None req-14e13a18-54b9-4ee7-8a29-56a51d628a23 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Task: {'id': task-2194327, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.192156} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.108911] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-14e13a18-54b9-4ee7-8a29-56a51d628a23 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 862.109170] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-14e13a18-54b9-4ee7-8a29-56a51d628a23 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Deleted contents of the VM from datastore datastore1 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 862.109397] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-14e13a18-54b9-4ee7-8a29-56a51d628a23 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 862.109597] env[62627]: INFO nova.compute.manager [None req-14e13a18-54b9-4ee7-8a29-56a51d628a23 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Took 1.17 seconds to destroy the instance on the hypervisor. [ 862.109869] env[62627]: DEBUG oslo.service.loopingcall [None req-14e13a18-54b9-4ee7-8a29-56a51d628a23 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 862.110104] env[62627]: DEBUG nova.compute.manager [-] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 862.110223] env[62627]: DEBUG nova.network.neutron [-] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 862.304169] env[62627]: DEBUG oslo_vmware.api [None req-ce13407a-8c0c-42a9-a0b2-80db343afde3 tempest-VolumesAssistedSnapshotsTest-1158855689 tempest-VolumesAssistedSnapshotsTest-1158855689-project-admin] Task: {'id': task-2194320, 'name': ReconfigVM_Task, 'duration_secs': 1.422274} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.304458] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-ce13407a-8c0c-42a9-a0b2-80db343afde3 tempest-VolumesAssistedSnapshotsTest-1158855689 tempest-VolumesAssistedSnapshotsTest-1158855689-project-admin] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Reconfigured VM instance instance-0000000a to detach disk 2001 {{(pid=62627) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 862.309138] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-258d1a25-8e7f-43ba-a9bf-203a2254983f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.327212] env[62627]: DEBUG oslo_vmware.api [None req-ce13407a-8c0c-42a9-a0b2-80db343afde3 tempest-VolumesAssistedSnapshotsTest-1158855689 tempest-VolumesAssistedSnapshotsTest-1158855689-project-admin] Waiting for the task: (returnval){ [ 862.327212] env[62627]: value = "task-2194330" [ 862.327212] env[62627]: _type = "Task" [ 862.327212] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.337067] env[62627]: DEBUG oslo_vmware.api [None req-ce13407a-8c0c-42a9-a0b2-80db343afde3 tempest-VolumesAssistedSnapshotsTest-1158855689 tempest-VolumesAssistedSnapshotsTest-1158855689-project-admin] Task: {'id': task-2194330, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.442076] env[62627]: DEBUG oslo_vmware.api [None req-8081bc04-f014-4cc7-a3be-c15165fd2dd3 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194328, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.481688] env[62627]: DEBUG nova.objects.base [None req-647f694d-9af8-4055-9905-bd3791563680 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=62627) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 862.482041] env[62627]: DEBUG nova.network.neutron [None req-647f694d-9af8-4055-9905-bd3791563680 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 862.498527] env[62627]: DEBUG oslo_vmware.api [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Task: {'id': task-2194329, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.509618] env[62627]: DEBUG nova.network.neutron [req-04118363-0368-456c-97cd-feecbcdd733e req-3def2994-a680-4e05-bf88-8b3b03a75eeb service nova] [instance: 5f0240da-bec1-4bf3-a975-6ed3d0630a2d] Updated VIF entry in instance network info cache for port 2807cd3b-e852-4c5c-bd4c-409c4d5c7e2f. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 862.510196] env[62627]: DEBUG nova.network.neutron [req-04118363-0368-456c-97cd-feecbcdd733e req-3def2994-a680-4e05-bf88-8b3b03a75eeb service nova] [instance: 5f0240da-bec1-4bf3-a975-6ed3d0630a2d] Updating instance_info_cache with network_info: [{"id": "2807cd3b-e852-4c5c-bd4c-409c4d5c7e2f", "address": "fa:16:3e:17:96:86", "network": {"id": "bd876f46-2bdf-4196-b718-c36f7e15f150", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.39", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ccb0cafa1a904d578ec4998b386bebf7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae1d6a8-cbba-4bbf-af10-ba5467340475", "external-id": "nsx-vlan-transportzone-271", "segmentation_id": 271, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2807cd3b-e8", "ovs_interfaceid": "2807cd3b-e852-4c5c-bd4c-409c4d5c7e2f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 862.570032] env[62627]: DEBUG oslo_vmware.api [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5255896e-0a00-6e67-f3de-73c8fe2fd32c, 'name': SearchDatastore_Task, 'duration_secs': 0.033462} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.573626] env[62627]: DEBUG oslo_concurrency.lockutils [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 862.573865] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] [instance: 5f0240da-bec1-4bf3-a975-6ed3d0630a2d] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 862.574153] env[62627]: DEBUG oslo_concurrency.lockutils [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 862.574302] env[62627]: DEBUG oslo_concurrency.lockutils [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 862.575139] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 862.575139] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f4fdfac7-78b6-4aaa-ad05-4e0c0829c2da {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.591635] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 862.591861] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 862.592640] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-90a6227a-f54b-4ca6-ae09-d843f58075cf {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.603127] env[62627]: DEBUG nova.policy [None req-647f694d-9af8-4055-9905-bd3791563680 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3c5e5eee141e496db2d2ea316c43e6be', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8e4f7ad3ebcf4c4d97bdbce58eec5a09', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 862.605880] env[62627]: DEBUG oslo_vmware.api [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Waiting for the task: (returnval){ [ 862.605880] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52675d71-4374-22fe-0cb6-e96a9cf62c27" [ 862.605880] env[62627]: _type = "Task" [ 862.605880] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.614150] env[62627]: DEBUG oslo_vmware.api [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52675d71-4374-22fe-0cb6-e96a9cf62c27, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.843881] env[62627]: DEBUG oslo_vmware.api [None req-ce13407a-8c0c-42a9-a0b2-80db343afde3 tempest-VolumesAssistedSnapshotsTest-1158855689 tempest-VolumesAssistedSnapshotsTest-1158855689-project-admin] Task: {'id': task-2194330, 'name': ReconfigVM_Task, 'duration_secs': 0.240868} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.843881] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-ce13407a-8c0c-42a9-a0b2-80db343afde3 tempest-VolumesAssistedSnapshotsTest-1158855689 tempest-VolumesAssistedSnapshotsTest-1158855689-project-admin] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447644', 'volume_id': '8911edc0-40ae-43db-99b6-9eec97446da5', 'name': 'volume-8911edc0-40ae-43db-99b6-9eec97446da5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'db8ddb7b-b119-415c-98d9-dc5b3d2868b7', 'attached_at': '', 'detached_at': '', 'volume_id': '8911edc0-40ae-43db-99b6-9eec97446da5', 'serial': '8911edc0-40ae-43db-99b6-9eec97446da5'} {{(pid=62627) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 862.944613] env[62627]: DEBUG oslo_vmware.api [None req-8081bc04-f014-4cc7-a3be-c15165fd2dd3 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194328, 'name': PowerOffVM_Task, 'duration_secs': 0.601182} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.945050] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-8081bc04-f014-4cc7-a3be-c15165fd2dd3 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 862.950608] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-8081bc04-f014-4cc7-a3be-c15165fd2dd3 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Reconfiguring VM instance instance-00000020 to detach disk 2001 {{(pid=62627) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 862.953513] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a6160353-2d8d-4a08-acc5-7038e3f70d36 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.977651] env[62627]: DEBUG oslo_vmware.api [None req-8081bc04-f014-4cc7-a3be-c15165fd2dd3 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for the task: (returnval){ [ 862.977651] env[62627]: value = "task-2194331" [ 862.977651] env[62627]: _type = "Task" [ 862.977651] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.994479] env[62627]: DEBUG oslo_vmware.api [None req-8081bc04-f014-4cc7-a3be-c15165fd2dd3 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194331, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.003693] env[62627]: DEBUG oslo_vmware.api [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Task: {'id': task-2194329, 'name': ReconfigVM_Task, 'duration_secs': 0.792128} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.004099] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] Reconfigured VM instance instance-00000025 to attach disk [datastore1] d28285ed-2ec0-43a4-bd34-82d33816fea4/d28285ed-2ec0-43a4-bd34-82d33816fea4.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 863.004875] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2da45755-fc5f-4f88-913a-07ec5288470b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.013551] env[62627]: DEBUG oslo_vmware.api [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Waiting for the task: (returnval){ [ 863.013551] env[62627]: value = "task-2194332" [ 863.013551] env[62627]: _type = "Task" [ 863.013551] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.017062] env[62627]: DEBUG oslo_concurrency.lockutils [req-04118363-0368-456c-97cd-feecbcdd733e req-3def2994-a680-4e05-bf88-8b3b03a75eeb service nova] Releasing lock "refresh_cache-5f0240da-bec1-4bf3-a975-6ed3d0630a2d" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 863.023628] env[62627]: DEBUG oslo_vmware.api [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Task: {'id': task-2194332, 'name': Rename_Task} progress is 5%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.064960] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-328df47c-cccb-459a-90e9-ee129755a511 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.073233] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a848667-9a45-41a2-a6c2-b3d104069ed6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.105139] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-176aad89-40ad-43f5-8f41-797a2da83843 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.119746] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ab38a7d-e14a-4192-9035-cc6f065880b9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.123659] env[62627]: DEBUG oslo_vmware.api [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52675d71-4374-22fe-0cb6-e96a9cf62c27, 'name': SearchDatastore_Task, 'duration_secs': 0.022762} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.124889] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1dc6b3aa-bfcb-479c-97c2-15bf3604d7b9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.135829] env[62627]: DEBUG nova.compute.provider_tree [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 863.142192] env[62627]: DEBUG oslo_vmware.api [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Waiting for the task: (returnval){ [ 863.142192] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52f939b8-7274-3949-cc91-ce4db154bfdc" [ 863.142192] env[62627]: _type = "Task" [ 863.142192] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.152497] env[62627]: DEBUG oslo_vmware.api [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52f939b8-7274-3949-cc91-ce4db154bfdc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.244325] env[62627]: DEBUG nova.network.neutron [-] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 863.259062] env[62627]: DEBUG nova.network.neutron [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] Successfully updated port: f91eb28c-7881-4e2c-b51e-7c0f169652d1 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 863.413970] env[62627]: DEBUG nova.objects.instance [None req-ce13407a-8c0c-42a9-a0b2-80db343afde3 tempest-VolumesAssistedSnapshotsTest-1158855689 tempest-VolumesAssistedSnapshotsTest-1158855689-project-admin] Lazy-loading 'flavor' on Instance uuid db8ddb7b-b119-415c-98d9-dc5b3d2868b7 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 863.493112] env[62627]: DEBUG oslo_vmware.api [None req-8081bc04-f014-4cc7-a3be-c15165fd2dd3 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194331, 'name': ReconfigVM_Task, 'duration_secs': 0.3342} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.493498] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-8081bc04-f014-4cc7-a3be-c15165fd2dd3 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Reconfigured VM instance instance-00000020 to detach disk 2001 {{(pid=62627) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 863.493756] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-8081bc04-f014-4cc7-a3be-c15165fd2dd3 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 863.494107] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-01f8f036-e415-465d-950c-a2cf66aaadcd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.500221] env[62627]: DEBUG nova.network.neutron [None req-647f694d-9af8-4055-9905-bd3791563680 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Successfully created port: 202d765c-7217-40e5-9216-6ffe27003834 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 863.505032] env[62627]: DEBUG oslo_vmware.api [None req-8081bc04-f014-4cc7-a3be-c15165fd2dd3 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for the task: (returnval){ [ 863.505032] env[62627]: value = "task-2194333" [ 863.505032] env[62627]: _type = "Task" [ 863.505032] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.517981] env[62627]: DEBUG oslo_vmware.api [None req-8081bc04-f014-4cc7-a3be-c15165fd2dd3 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194333, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.527267] env[62627]: DEBUG oslo_vmware.api [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Task: {'id': task-2194332, 'name': Rename_Task, 'duration_secs': 0.177499} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.527552] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 863.527820] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2b9f006e-811f-48fb-a13d-a6b333f8664b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.535403] env[62627]: DEBUG oslo_vmware.api [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Waiting for the task: (returnval){ [ 863.535403] env[62627]: value = "task-2194334" [ 863.535403] env[62627]: _type = "Task" [ 863.535403] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.546706] env[62627]: DEBUG oslo_vmware.api [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Task: {'id': task-2194334, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.642022] env[62627]: DEBUG nova.scheduler.client.report [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 863.658461] env[62627]: DEBUG oslo_vmware.api [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52f939b8-7274-3949-cc91-ce4db154bfdc, 'name': SearchDatastore_Task, 'duration_secs': 0.013927} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.658906] env[62627]: DEBUG oslo_concurrency.lockutils [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 863.659323] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 5f0240da-bec1-4bf3-a975-6ed3d0630a2d/5f0240da-bec1-4bf3-a975-6ed3d0630a2d.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 863.659765] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9e259a93-205b-4ca4-bb18-48b5fc4c9f9e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.685332] env[62627]: DEBUG oslo_vmware.api [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Waiting for the task: (returnval){ [ 863.685332] env[62627]: value = "task-2194335" [ 863.685332] env[62627]: _type = "Task" [ 863.685332] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.692206] env[62627]: DEBUG oslo_vmware.api [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Task: {'id': task-2194335, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.748220] env[62627]: INFO nova.compute.manager [-] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Took 1.64 seconds to deallocate network for instance. 
[ 863.763210] env[62627]: DEBUG oslo_concurrency.lockutils [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Acquiring lock "refresh_cache-1129349d-65db-4e1d-bb08-cea793adf996" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 863.763210] env[62627]: DEBUG oslo_concurrency.lockutils [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Acquired lock "refresh_cache-1129349d-65db-4e1d-bb08-cea793adf996" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 863.763210] env[62627]: DEBUG nova.network.neutron [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 863.957962] env[62627]: DEBUG oslo_vmware.rw_handles [None req-c4c2bcd7-2e48-41e6-9eb5-a66eadcde0c4 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5226bf73-fad4-14be-e37e-63a3da716528/disk-0.vmdk. {{(pid=62627) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 863.959029] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-492d724e-7eb9-4b66-bc06-3b907f94471e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.968109] env[62627]: DEBUG oslo_vmware.rw_handles [None req-c4c2bcd7-2e48-41e6-9eb5-a66eadcde0c4 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5226bf73-fad4-14be-e37e-63a3da716528/disk-0.vmdk is in state: ready. {{(pid=62627) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 863.968298] env[62627]: ERROR oslo_vmware.rw_handles [None req-c4c2bcd7-2e48-41e6-9eb5-a66eadcde0c4 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5226bf73-fad4-14be-e37e-63a3da716528/disk-0.vmdk due to incomplete transfer. [ 863.968640] env[62627]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-101eb171-5bfa-48be-a44f-e4961a3afb9b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.981979] env[62627]: DEBUG oslo_vmware.rw_handles [None req-c4c2bcd7-2e48-41e6-9eb5-a66eadcde0c4 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5226bf73-fad4-14be-e37e-63a3da716528/disk-0.vmdk. 
{{(pid=62627) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 863.982261] env[62627]: DEBUG nova.virt.vmwareapi.images [None req-c4c2bcd7-2e48-41e6-9eb5-a66eadcde0c4 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Uploaded image 23f5f7b2-7c7c-4574-9060-cdeebd60d98b to the Glance image server {{(pid=62627) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 863.983848] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4c2bcd7-2e48-41e6-9eb5-a66eadcde0c4 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Destroying the VM {{(pid=62627) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 863.984243] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-d05c9f39-c71c-419b-b631-8fdae620ac67 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.995917] env[62627]: DEBUG oslo_vmware.api [None req-c4c2bcd7-2e48-41e6-9eb5-a66eadcde0c4 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Waiting for the task: (returnval){ [ 863.995917] env[62627]: value = "task-2194336" [ 863.995917] env[62627]: _type = "Task" [ 863.995917] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.008173] env[62627]: DEBUG oslo_vmware.api [None req-c4c2bcd7-2e48-41e6-9eb5-a66eadcde0c4 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194336, 'name': Destroy_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.018991] env[62627]: DEBUG oslo_vmware.api [None req-8081bc04-f014-4cc7-a3be-c15165fd2dd3 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194333, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.047342] env[62627]: DEBUG oslo_vmware.api [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Task: {'id': task-2194334, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.152370] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.706s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 864.152962] env[62627]: DEBUG nova.compute.manager [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8] Start building networks asynchronously for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 864.156162] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 34.745s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 864.195930] env[62627]: DEBUG oslo_vmware.api [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Task: {'id': task-2194335, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.258094] env[62627]: DEBUG oslo_concurrency.lockutils [None req-14e13a18-54b9-4ee7-8a29-56a51d628a23 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 864.303460] env[62627]: DEBUG nova.compute.manager [req-9d815c0d-a695-411e-a9a5-c54f29c771e1 req-88bf7d3b-0951-40ef-bf6c-fcce9c2e456c service nova] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Received event network-vif-deleted-bb608229-71bb-45a8-8815-5bb1cda6c5b0 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 864.308231] env[62627]: DEBUG nova.network.neutron [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] Instance cache missing network info. 
{{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 864.365730] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquiring lock "d0786233-087b-4000-8c37-fd5c55cc0ef4" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 864.366218] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "d0786233-087b-4000-8c37-fd5c55cc0ef4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 864.421310] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ce13407a-8c0c-42a9-a0b2-80db343afde3 tempest-VolumesAssistedSnapshotsTest-1158855689 tempest-VolumesAssistedSnapshotsTest-1158855689-project-admin] Lock "db8ddb7b-b119-415c-98d9-dc5b3d2868b7" "released" by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" :: held 4.312s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 864.493656] env[62627]: DEBUG nova.network.neutron [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] Updating instance_info_cache with network_info: [{"id": "f91eb28c-7881-4e2c-b51e-7c0f169652d1", "address": "fa:16:3e:d0:b1:d8", "network": {"id": "6868e7f7-6e86-4ef3-a3f2-82b652203315", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-729196523-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "6ad7b328aba44b51aec9455bada6678c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf91eb28c-78", "ovs_interfaceid": "f91eb28c-7881-4e2c-b51e-7c0f169652d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 864.506753] env[62627]: DEBUG oslo_vmware.api [None req-c4c2bcd7-2e48-41e6-9eb5-a66eadcde0c4 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194336, 'name': Destroy_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.517318] env[62627]: DEBUG oslo_vmware.api [None req-8081bc04-f014-4cc7-a3be-c15165fd2dd3 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194333, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.546546] env[62627]: DEBUG oslo_vmware.api [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Task: {'id': task-2194334, 'name': PowerOnVM_Task, 'duration_secs': 0.583695} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.546815] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 864.547053] env[62627]: INFO nova.compute.manager [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] Took 8.80 seconds to spawn the instance on the hypervisor. [ 864.547240] env[62627]: DEBUG nova.compute.manager [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 864.547989] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b95bc99c-6aa2-4984-87fb-8840cdb07f30 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.660282] env[62627]: DEBUG nova.compute.utils [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 864.674072] env[62627]: DEBUG nova.compute.manager [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 864.675872] env[62627]: DEBUG nova.compute.manager [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8] Allocating IP information in the background. 
{{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 864.676120] env[62627]: DEBUG nova.network.neutron [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 864.697425] env[62627]: DEBUG oslo_vmware.api [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Task: {'id': task-2194335, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.647782} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.697599] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 5f0240da-bec1-4bf3-a975-6ed3d0630a2d/5f0240da-bec1-4bf3-a975-6ed3d0630a2d.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 864.697731] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] [instance: 5f0240da-bec1-4bf3-a975-6ed3d0630a2d] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 864.698101] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d7b28941-7be0-42a3-9267-7ae76eca86ac {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.707256] env[62627]: DEBUG oslo_vmware.api [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Waiting for the task: (returnval){ [ 864.707256] env[62627]: value = "task-2194337" [ 864.707256] env[62627]: _type = "Task" [ 864.707256] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.716529] env[62627]: DEBUG oslo_vmware.api [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Task: {'id': task-2194337, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.745895] env[62627]: DEBUG nova.policy [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '991f6f2c63da4e5cb3aff0dabe1d5968', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2e06a7550ef84edd9861cafa0bfcb857', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 864.997497] env[62627]: DEBUG oslo_concurrency.lockutils [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Releasing lock "refresh_cache-1129349d-65db-4e1d-bb08-cea793adf996" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 864.997702] env[62627]: DEBUG nova.compute.manager [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] Instance network_info: |[{"id": "f91eb28c-7881-4e2c-b51e-7c0f169652d1", "address": "fa:16:3e:d0:b1:d8", "network": {"id": "6868e7f7-6e86-4ef3-a3f2-82b652203315", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-729196523-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "6ad7b328aba44b51aec9455bada6678c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf91eb28c-78", "ovs_interfaceid": "f91eb28c-7881-4e2c-b51e-7c0f169652d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 864.998157] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d0:b1:d8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ded18042-834c-4792-b3e8-b1c377446432', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f91eb28c-7881-4e2c-b51e-7c0f169652d1', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 865.005960] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 
tempest-ServerRescueTestJSON-1890721172-project-member] Creating folder: Project (6ad7b328aba44b51aec9455bada6678c). Parent ref: group-v447541. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 865.009242] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bd339304-1bf8-45db-8814-9aa574636689 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.019411] env[62627]: DEBUG oslo_vmware.api [None req-8081bc04-f014-4cc7-a3be-c15165fd2dd3 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194333, 'name': PowerOnVM_Task, 'duration_secs': 1.298086} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.022464] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-8081bc04-f014-4cc7-a3be-c15165fd2dd3 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 865.022698] env[62627]: DEBUG nova.compute.manager [None req-8081bc04-f014-4cc7-a3be-c15165fd2dd3 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 865.022970] env[62627]: DEBUG oslo_vmware.api [None req-c4c2bcd7-2e48-41e6-9eb5-a66eadcde0c4 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194336, 'name': Destroy_Task} progress is 33%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.023720] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-256fc623-2017-4736-b7bc-44f3ebffa4b1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.029271] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Created folder: Project (6ad7b328aba44b51aec9455bada6678c) in parent group-v447541. [ 865.029471] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Creating folder: Instances. Parent ref: group-v447651. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 865.030913] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b8443ad1-6a69-455b-8153-b3f5204cccb9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.044022] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Created folder: Instances in parent group-v447651. 
[ 865.044022] env[62627]: DEBUG oslo.service.loopingcall [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 865.044022] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 865.044022] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0fc57736-f268-444e-9a3c-3bce393de1ef {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.064220] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 865.064220] env[62627]: value = "task-2194340" [ 865.064220] env[62627]: _type = "Task" [ 865.064220] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.069208] env[62627]: INFO nova.compute.manager [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] Took 48.45 seconds to build instance. [ 865.076615] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194340, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.205844] env[62627]: WARNING nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance a674db15-ea4e-44c8-abc1-6af425286517 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 865.206041] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 7366e4b0-bdba-435d-844d-8e81c4464b31 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 865.206208] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance c1fd6afb-9332-446d-a441-5942b60edc94 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 865.206378] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 77c60059-535a-47ca-a5b0-45cc1df93b45 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 865.206621] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance db8ddb7b-b119-415c-98d9-dc5b3d2868b7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 865.206621] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 7691c2e2-4e43-4990-bf4d-2ef3298a4264 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 865.209953] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance a397bce6-30c7-485d-81ab-88a8e021996c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 865.209953] env[62627]: WARNING nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance e5712962-ad8e-4cf2-aa47-275293609a13 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 865.209953] env[62627]: WARNING nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance f09c685d-9103-4c94-a44a-266aacc334f3 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 865.209953] env[62627]: WARNING nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 76fb58dc-df20-464b-bda0-dcf09d69c718 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 865.209953] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance ab999825-4310-4fd8-81f0-6bb5dde4175b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 865.210110] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance faa16f10-63f0-457c-8cd7-20a624454d86 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 865.210110] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance a12855e2-1544-4e85-a7ff-c026fec861b6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 865.210110] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance b1ba58c1-340f-486a-8f47-86fbc7b27485 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 865.210110] env[62627]: WARNING nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 8af5ca34-f9c4-4b68-b0f7-7163f388b778 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 865.210219] env[62627]: WARNING nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 95ac4fbf-071a-403e-8cae-88d9899757ba is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 865.210219] env[62627]: WARNING nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance cbd30c37-f6c3-4229-8b95-855cab64924c is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 865.210219] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 241311b0-ba81-41d1-a9b6-cd40d04dbe3c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 865.210219] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance aa181094-cb78-44e0-8d7a-1d677e0af1fe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 865.210326] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 865.210326] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance aed62160-2db5-4008-a491-2b27ae81ef5f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 865.210326] env[62627]: WARNING nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 32a3b8bb-7c3a-4e30-8a50-37ce53e71882 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 865.210326] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance d28285ed-2ec0-43a4-bd34-82d33816fea4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 865.210442] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 5f0240da-bec1-4bf3-a975-6ed3d0630a2d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 865.210442] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 1129349d-65db-4e1d-bb08-cea793adf996 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 865.210442] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 865.234382] env[62627]: DEBUG oslo_vmware.api [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Task: {'id': task-2194337, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.177959} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.234522] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] [instance: 5f0240da-bec1-4bf3-a975-6ed3d0630a2d] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 865.235723] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-594eee60-85bb-40a5-a692-f2a553924ab8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.273350] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] [instance: 5f0240da-bec1-4bf3-a975-6ed3d0630a2d] Reconfiguring VM instance instance-00000026 to attach disk [datastore2] 5f0240da-bec1-4bf3-a975-6ed3d0630a2d/5f0240da-bec1-4bf3-a975-6ed3d0630a2d.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 865.273753] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-78224643-d1f9-4fc8-aca7-49fc356654c0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.295573] env[62627]: DEBUG oslo_vmware.api [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Waiting for the task: (returnval){ [ 865.295573] env[62627]: value = "task-2194342" [ 865.295573] env[62627]: _type = "Task" [ 865.295573] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.305770] env[62627]: DEBUG oslo_vmware.api [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Task: {'id': task-2194342, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.415164] env[62627]: DEBUG nova.network.neutron [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8] Successfully created port: 4eb23b03-8a91-47c9-8220-f878ec2488f3 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 865.492791] env[62627]: DEBUG nova.network.neutron [None req-647f694d-9af8-4055-9905-bd3791563680 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Successfully updated port: 202d765c-7217-40e5-9216-6ffe27003834 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 865.510572] env[62627]: DEBUG oslo_vmware.api [None req-c4c2bcd7-2e48-41e6-9eb5-a66eadcde0c4 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194336, 'name': Destroy_Task, 'duration_secs': 1.255844} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.510818] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-c4c2bcd7-2e48-41e6-9eb5-a66eadcde0c4 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Destroyed the VM [ 865.511097] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c4c2bcd7-2e48-41e6-9eb5-a66eadcde0c4 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Deleting Snapshot of the VM instance {{(pid=62627) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 865.511366] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-a0af8779-c20e-4006-9586-6b19cc8b1cf1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.520063] env[62627]: DEBUG oslo_vmware.api [None req-c4c2bcd7-2e48-41e6-9eb5-a66eadcde0c4 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Waiting for the task: (returnval){ [ 865.520063] env[62627]: value = "task-2194343" [ 865.520063] env[62627]: _type = "Task" [ 865.520063] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.529454] env[62627]: DEBUG oslo_vmware.api [None req-c4c2bcd7-2e48-41e6-9eb5-a66eadcde0c4 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194343, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.571605] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6ee298e0-8966-4275-82fa-797da2540e1c tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Lock "d28285ed-2ec0-43a4-bd34-82d33816fea4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 99.127s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 865.579583] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194340, 'name': CreateVM_Task, 'duration_secs': 0.411939} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.579775] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 865.580504] env[62627]: DEBUG oslo_concurrency.lockutils [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 865.580692] env[62627]: DEBUG oslo_concurrency.lockutils [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 865.581051] env[62627]: DEBUG oslo_concurrency.lockutils [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 865.581487] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-726596d3-a5d5-4f92-8c3a-8dc7c4d776e7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.589581] env[62627]: DEBUG oslo_vmware.api [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Waiting for the task: (returnval){ [ 865.589581] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5278d361-df19-87b8-dfb1-9e6d2f1d94f9" [ 865.589581] env[62627]: _type = "Task" [ 865.589581] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.599563] env[62627]: DEBUG oslo_vmware.api [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5278d361-df19-87b8-dfb1-9e6d2f1d94f9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.690613] env[62627]: DEBUG nova.compute.manager [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8] Start spawning the instance on the hypervisor. 
{{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 865.711708] env[62627]: DEBUG nova.virt.hardware [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 865.711955] env[62627]: DEBUG nova.virt.hardware [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 865.712171] env[62627]: DEBUG nova.virt.hardware [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 865.712369] env[62627]: DEBUG nova.virt.hardware [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 865.712514] env[62627]: DEBUG nova.virt.hardware [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 865.712659] env[62627]: DEBUG nova.virt.hardware [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 865.712862] env[62627]: DEBUG nova.virt.hardware [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 865.713028] env[62627]: DEBUG nova.virt.hardware [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 865.713239] env[62627]: DEBUG nova.virt.hardware [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 865.713413] env[62627]: DEBUG nova.virt.hardware [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 865.713584] env[62627]: DEBUG nova.virt.hardware [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 865.714501] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e9cab1a-9c17-49e4-af0d-5c7ca1770d35 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.719658] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 865.724187] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f788a1e-817d-4adc-9d80-ff3a0642de81 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.806541] env[62627]: DEBUG oslo_vmware.api [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Task: {'id': task-2194342, 'name': ReconfigVM_Task, 'duration_secs': 0.341844} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.806831] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] [instance: 5f0240da-bec1-4bf3-a975-6ed3d0630a2d] Reconfigured VM instance instance-00000026 to attach disk [datastore2] 5f0240da-bec1-4bf3-a975-6ed3d0630a2d/5f0240da-bec1-4bf3-a975-6ed3d0630a2d.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 865.807473] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d37fadf3-12f0-43fb-93f0-601d360f2eb6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.815818] env[62627]: DEBUG oslo_vmware.api [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Waiting for the task: (returnval){ [ 865.815818] env[62627]: value = "task-2194344" [ 865.815818] env[62627]: _type = "Task" [ 865.815818] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.824607] env[62627]: DEBUG oslo_vmware.api [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Task: {'id': task-2194344, 'name': Rename_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.995897] env[62627]: DEBUG oslo_concurrency.lockutils [None req-647f694d-9af8-4055-9905-bd3791563680 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquiring lock "refresh_cache-a12855e2-1544-4e85-a7ff-c026fec861b6" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 865.996091] env[62627]: DEBUG oslo_concurrency.lockutils [None req-647f694d-9af8-4055-9905-bd3791563680 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquired lock "refresh_cache-a12855e2-1544-4e85-a7ff-c026fec861b6" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 865.996297] env[62627]: DEBUG nova.network.neutron [None req-647f694d-9af8-4055-9905-bd3791563680 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 866.033945] env[62627]: DEBUG oslo_vmware.api [None req-c4c2bcd7-2e48-41e6-9eb5-a66eadcde0c4 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194343, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.074741] env[62627]: DEBUG nova.compute.manager [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218] Starting instance... 
{{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 866.101166] env[62627]: DEBUG oslo_vmware.api [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5278d361-df19-87b8-dfb1-9e6d2f1d94f9, 'name': SearchDatastore_Task, 'duration_secs': 0.02852} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.101474] env[62627]: DEBUG oslo_concurrency.lockutils [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 866.101778] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 866.102076] env[62627]: DEBUG oslo_concurrency.lockutils [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 866.102281] env[62627]: DEBUG oslo_concurrency.lockutils [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.102501] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 866.102803] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-23c70cb8-e353-4f50-b243-b6abcf322940 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.119934] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 866.119934] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 866.119934] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cc3c8ddc-bcd9-485b-a5d4-8136a1fb16a1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.125889] env[62627]: DEBUG oslo_vmware.api [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Waiting for the task: (returnval){ [ 866.125889] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]524c403d-3ff2-6a6e-755d-3f4373e68a33" [ 866.125889] env[62627]: _type = "Task" [ 866.125889] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.133793] env[62627]: DEBUG oslo_vmware.api [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]524c403d-3ff2-6a6e-755d-3f4373e68a33, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.224180] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 574293e3-8ec8-410f-8a30-83bc60d25d64 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 866.328257] env[62627]: DEBUG oslo_vmware.api [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Task: {'id': task-2194344, 'name': Rename_Task, 'duration_secs': 0.142778} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.328529] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] [instance: 5f0240da-bec1-4bf3-a975-6ed3d0630a2d] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 866.328809] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-133bd734-517d-425e-a064-cd750e5b4da2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.337652] env[62627]: DEBUG oslo_vmware.api [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Waiting for the task: (returnval){ [ 866.337652] env[62627]: value = "task-2194345" [ 866.337652] env[62627]: _type = "Task" [ 866.337652] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.346391] env[62627]: DEBUG oslo_vmware.api [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Task: {'id': task-2194345, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.533884] env[62627]: DEBUG oslo_vmware.api [None req-c4c2bcd7-2e48-41e6-9eb5-a66eadcde0c4 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194343, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.548838] env[62627]: WARNING nova.network.neutron [None req-647f694d-9af8-4055-9905-bd3791563680 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] a1c5c227-8f1f-4881-a6d8-f446b64d9846 already exists in list: networks containing: ['a1c5c227-8f1f-4881-a6d8-f446b64d9846']. ignoring it [ 866.601422] env[62627]: DEBUG oslo_concurrency.lockutils [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 866.637508] env[62627]: DEBUG oslo_vmware.api [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]524c403d-3ff2-6a6e-755d-3f4373e68a33, 'name': SearchDatastore_Task, 'duration_secs': 0.030906} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.638396] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-43dcf001-0592-4a14-9c38-96f5a00671b8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.650809] env[62627]: DEBUG oslo_vmware.api [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Waiting for the task: (returnval){ [ 866.650809] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52877e37-0602-835d-7860-17eea6dbbe55" [ 866.650809] env[62627]: _type = "Task" [ 866.650809] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.660429] env[62627]: DEBUG oslo_vmware.api [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52877e37-0602-835d-7860-17eea6dbbe55, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.727423] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 02216be1-cdb9-421a-8a23-4a919b376595 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 866.849283] env[62627]: DEBUG oslo_vmware.api [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Task: {'id': task-2194345, 'name': PowerOnVM_Task, 'duration_secs': 0.452405} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.849584] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] [instance: 5f0240da-bec1-4bf3-a975-6ed3d0630a2d] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 866.849793] env[62627]: INFO nova.compute.manager [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] [instance: 5f0240da-bec1-4bf3-a975-6ed3d0630a2d] Took 8.26 seconds to spawn the instance on the hypervisor. [ 866.849980] env[62627]: DEBUG nova.compute.manager [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] [instance: 5f0240da-bec1-4bf3-a975-6ed3d0630a2d] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 866.851538] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edcb0ea2-a6bb-419a-98f4-daedf0da66ce {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.033851] env[62627]: DEBUG oslo_vmware.api [None req-c4c2bcd7-2e48-41e6-9eb5-a66eadcde0c4 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194343, 'name': RemoveSnapshot_Task, 'duration_secs': 1.134865} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.034178] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c4c2bcd7-2e48-41e6-9eb5-a66eadcde0c4 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Deleted Snapshot of the VM instance {{(pid=62627) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 867.034512] env[62627]: INFO nova.compute.manager [None req-c4c2bcd7-2e48-41e6-9eb5-a66eadcde0c4 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Took 16.63 seconds to snapshot the instance on the hypervisor. 
[ 867.082486] env[62627]: DEBUG nova.network.neutron [None req-647f694d-9af8-4055-9905-bd3791563680 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Updating instance_info_cache with network_info: [{"id": "21d30d49-10fb-462d-9b5f-7525c3497178", "address": "fa:16:3e:9b:65:45", "network": {"id": "a1c5c227-8f1f-4881-a6d8-f446b64d9846", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1052922661-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.251", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e4f7ad3ebcf4c4d97bdbce58eec5a09", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "023d6500-887e-4dc4-bec5-06b40450d9c0", "external-id": "nsx-vlan-transportzone-108", "segmentation_id": 108, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap21d30d49-10", "ovs_interfaceid": "21d30d49-10fb-462d-9b5f-7525c3497178", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "202d765c-7217-40e5-9216-6ffe27003834", "address": "fa:16:3e:ee:de:71", "network": {"id": "a1c5c227-8f1f-4881-a6d8-f446b64d9846", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1052922661-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e4f7ad3ebcf4c4d97bdbce58eec5a09", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "023d6500-887e-4dc4-bec5-06b40450d9c0", "external-id": "nsx-vlan-transportzone-108", "segmentation_id": 108, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap202d765c-72", "ovs_interfaceid": "202d765c-7217-40e5-9216-6ffe27003834", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 867.162851] env[62627]: DEBUG oslo_vmware.api [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52877e37-0602-835d-7860-17eea6dbbe55, 'name': SearchDatastore_Task, 'duration_secs': 0.01126} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.163149] env[62627]: DEBUG oslo_concurrency.lockutils [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 867.163417] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 1129349d-65db-4e1d-bb08-cea793adf996/1129349d-65db-4e1d-bb08-cea793adf996.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 867.163715] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c229e910-5368-4149-8407-ea3fc00a0d04 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.172852] env[62627]: DEBUG oslo_vmware.api [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Waiting for the task: (returnval){ [ 867.172852] env[62627]: value = "task-2194346" [ 867.172852] env[62627]: _type = "Task" [ 867.172852] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.181687] env[62627]: DEBUG oslo_vmware.api [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194346, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.234256] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance f1b70379-e13d-4007-ac2d-3d2d43cd5602 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 867.307300] env[62627]: DEBUG nova.network.neutron [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8] Successfully updated port: 4eb23b03-8a91-47c9-8220-f878ec2488f3 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 867.370695] env[62627]: INFO nova.compute.manager [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] [instance: 5f0240da-bec1-4bf3-a975-6ed3d0630a2d] Took 46.03 seconds to build instance. 
[ 867.433680] env[62627]: DEBUG nova.compute.manager [req-a8ac1355-206f-46f5-92cb-f7cc0a129112 req-e18a0031-32b6-4d85-abe7-85396c9e6412 service nova] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] Received event network-vif-plugged-f91eb28c-7881-4e2c-b51e-7c0f169652d1 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 867.433890] env[62627]: DEBUG oslo_concurrency.lockutils [req-a8ac1355-206f-46f5-92cb-f7cc0a129112 req-e18a0031-32b6-4d85-abe7-85396c9e6412 service nova] Acquiring lock "1129349d-65db-4e1d-bb08-cea793adf996-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 867.434213] env[62627]: DEBUG oslo_concurrency.lockutils [req-a8ac1355-206f-46f5-92cb-f7cc0a129112 req-e18a0031-32b6-4d85-abe7-85396c9e6412 service nova] Lock "1129349d-65db-4e1d-bb08-cea793adf996-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 867.434468] env[62627]: DEBUG oslo_concurrency.lockutils [req-a8ac1355-206f-46f5-92cb-f7cc0a129112 req-e18a0031-32b6-4d85-abe7-85396c9e6412 service nova] Lock "1129349d-65db-4e1d-bb08-cea793adf996-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 867.434715] env[62627]: DEBUG nova.compute.manager [req-a8ac1355-206f-46f5-92cb-f7cc0a129112 req-e18a0031-32b6-4d85-abe7-85396c9e6412 service nova] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] No waiting events found dispatching network-vif-plugged-f91eb28c-7881-4e2c-b51e-7c0f169652d1 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 867.434990] env[62627]: WARNING nova.compute.manager [req-a8ac1355-206f-46f5-92cb-f7cc0a129112 req-e18a0031-32b6-4d85-abe7-85396c9e6412 service nova] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] Received unexpected event network-vif-plugged-f91eb28c-7881-4e2c-b51e-7c0f169652d1 for instance with vm_state building and task_state spawning. [ 867.435228] env[62627]: DEBUG nova.compute.manager [req-a8ac1355-206f-46f5-92cb-f7cc0a129112 req-e18a0031-32b6-4d85-abe7-85396c9e6412 service nova] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] Received event network-changed-f91eb28c-7881-4e2c-b51e-7c0f169652d1 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 867.435460] env[62627]: DEBUG nova.compute.manager [req-a8ac1355-206f-46f5-92cb-f7cc0a129112 req-e18a0031-32b6-4d85-abe7-85396c9e6412 service nova] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] Refreshing instance network info cache due to event network-changed-f91eb28c-7881-4e2c-b51e-7c0f169652d1. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 867.435723] env[62627]: DEBUG oslo_concurrency.lockutils [req-a8ac1355-206f-46f5-92cb-f7cc0a129112 req-e18a0031-32b6-4d85-abe7-85396c9e6412 service nova] Acquiring lock "refresh_cache-1129349d-65db-4e1d-bb08-cea793adf996" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 867.436601] env[62627]: DEBUG oslo_concurrency.lockutils [req-a8ac1355-206f-46f5-92cb-f7cc0a129112 req-e18a0031-32b6-4d85-abe7-85396c9e6412 service nova] Acquired lock "refresh_cache-1129349d-65db-4e1d-bb08-cea793adf996" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 867.436601] env[62627]: DEBUG nova.network.neutron [req-a8ac1355-206f-46f5-92cb-f7cc0a129112 req-e18a0031-32b6-4d85-abe7-85396c9e6412 service nova] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] Refreshing network info cache for port f91eb28c-7881-4e2c-b51e-7c0f169652d1 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 867.585070] env[62627]: DEBUG oslo_concurrency.lockutils [None req-647f694d-9af8-4055-9905-bd3791563680 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Releasing lock "refresh_cache-a12855e2-1544-4e85-a7ff-c026fec861b6" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 867.585612] env[62627]: DEBUG oslo_concurrency.lockutils [None req-647f694d-9af8-4055-9905-bd3791563680 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquiring lock "a12855e2-1544-4e85-a7ff-c026fec861b6" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 867.586917] env[62627]: DEBUG oslo_concurrency.lockutils [None req-647f694d-9af8-4055-9905-bd3791563680 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquired lock "a12855e2-1544-4e85-a7ff-c026fec861b6" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 867.586917] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63f1be6e-a779-4088-9944-7feb5723c431 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.607783] env[62627]: DEBUG nova.virt.hardware [None req-647f694d-9af8-4055-9905-bd3791563680 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 867.608052] env[62627]: DEBUG nova.virt.hardware [None req-647f694d-9af8-4055-9905-bd3791563680 tempest-AttachInterfacesTestJSON-578257446 
tempest-AttachInterfacesTestJSON-578257446-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 867.608212] env[62627]: DEBUG nova.virt.hardware [None req-647f694d-9af8-4055-9905-bd3791563680 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 867.608395] env[62627]: DEBUG nova.virt.hardware [None req-647f694d-9af8-4055-9905-bd3791563680 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 867.608540] env[62627]: DEBUG nova.virt.hardware [None req-647f694d-9af8-4055-9905-bd3791563680 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 867.608683] env[62627]: DEBUG nova.virt.hardware [None req-647f694d-9af8-4055-9905-bd3791563680 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 867.609035] env[62627]: DEBUG nova.virt.hardware [None req-647f694d-9af8-4055-9905-bd3791563680 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 867.609190] env[62627]: DEBUG nova.virt.hardware [None req-647f694d-9af8-4055-9905-bd3791563680 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 867.609358] env[62627]: DEBUG nova.virt.hardware [None req-647f694d-9af8-4055-9905-bd3791563680 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 867.609519] env[62627]: DEBUG nova.virt.hardware [None req-647f694d-9af8-4055-9905-bd3791563680 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 867.609691] env[62627]: DEBUG nova.virt.hardware [None req-647f694d-9af8-4055-9905-bd3791563680 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 867.616199] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-647f694d-9af8-4055-9905-bd3791563680 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Reconfiguring VM to attach interface {{(pid=62627) attach_interface 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 867.616559] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-32b118c1-9266-4461-807e-f4cd27e1832c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.636498] env[62627]: DEBUG oslo_vmware.api [None req-647f694d-9af8-4055-9905-bd3791563680 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Waiting for the task: (returnval){ [ 867.636498] env[62627]: value = "task-2194347" [ 867.636498] env[62627]: _type = "Task" [ 867.636498] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.646808] env[62627]: DEBUG oslo_vmware.api [None req-647f694d-9af8-4055-9905-bd3791563680 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2194347, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.684043] env[62627]: DEBUG oslo_vmware.api [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194346, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.736891] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 892fc84b-02a9-4fa0-81b5-80326a060c64 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 867.810152] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Acquiring lock "refresh_cache-4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 867.810330] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Acquired lock "refresh_cache-4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 867.810570] env[62627]: DEBUG nova.network.neutron [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 867.873089] env[62627]: DEBUG oslo_concurrency.lockutils [None req-855f1cb1-2e3c-47b0-a24a-22ec236e0b4f tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Lock "5f0240da-bec1-4bf3-a975-6ed3d0630a2d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 96.705s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 868.152505] env[62627]: DEBUG oslo_vmware.api [None req-647f694d-9af8-4055-9905-bd3791563680 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2194347, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.184834] env[62627]: DEBUG oslo_vmware.api [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194346, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.215419] env[62627]: DEBUG nova.network.neutron [req-a8ac1355-206f-46f5-92cb-f7cc0a129112 req-e18a0031-32b6-4d85-abe7-85396c9e6412 service nova] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] Updated VIF entry in instance network info cache for port f91eb28c-7881-4e2c-b51e-7c0f169652d1. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 868.215820] env[62627]: DEBUG nova.network.neutron [req-a8ac1355-206f-46f5-92cb-f7cc0a129112 req-e18a0031-32b6-4d85-abe7-85396c9e6412 service nova] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] Updating instance_info_cache with network_info: [{"id": "f91eb28c-7881-4e2c-b51e-7c0f169652d1", "address": "fa:16:3e:d0:b1:d8", "network": {"id": "6868e7f7-6e86-4ef3-a3f2-82b652203315", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-729196523-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "6ad7b328aba44b51aec9455bada6678c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf91eb28c-78", "ovs_interfaceid": "f91eb28c-7881-4e2c-b51e-7c0f169652d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 868.240264] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance e76bdb57-4a30-4222-ad4c-d95e4e6193cb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 868.303868] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6d114839-7f37-4a75-b9e2-92645243fb35 tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Acquiring lock "d28285ed-2ec0-43a4-bd34-82d33816fea4" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 868.304216] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6d114839-7f37-4a75-b9e2-92645243fb35 tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Lock "d28285ed-2ec0-43a4-bd34-82d33816fea4" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 868.304455] env[62627]: INFO nova.compute.manager [None req-6d114839-7f37-4a75-b9e2-92645243fb35 tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] Rebooting instance [ 868.342756] env[62627]: DEBUG nova.network.neutron [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8] Instance cache missing network info. 
{{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 868.375992] env[62627]: DEBUG nova.compute.manager [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] [instance: c9088ac9-456b-4c92-8773-79c263e7cc2a] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 868.491499] env[62627]: DEBUG nova.network.neutron [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8] Updating instance_info_cache with network_info: [{"id": "4eb23b03-8a91-47c9-8220-f878ec2488f3", "address": "fa:16:3e:1b:e0:9d", "network": {"id": "c8f2b47a-fb3f-4fb5-a55a-d527bd4b6f03", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-2107012038-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2e06a7550ef84edd9861cafa0bfcb857", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3117b312-701b-4439-b197-96b6c5cdca89", "external-id": "nsx-vlan-transportzone-94", "segmentation_id": 94, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4eb23b03-8a", "ovs_interfaceid": "4eb23b03-8a91-47c9-8220-f878ec2488f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 868.648692] env[62627]: DEBUG oslo_vmware.api [None req-647f694d-9af8-4055-9905-bd3791563680 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2194347, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.686297] env[62627]: DEBUG oslo_vmware.api [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194346, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.025814} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.686579] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 1129349d-65db-4e1d-bb08-cea793adf996/1129349d-65db-4e1d-bb08-cea793adf996.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 868.686790] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 868.687059] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-58f437ff-3d0d-4dce-a5f4-77223cabac07 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.694309] env[62627]: DEBUG oslo_vmware.api [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Waiting for the task: (returnval){ [ 868.694309] env[62627]: value = "task-2194348" [ 868.694309] env[62627]: _type = "Task" [ 868.694309] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.703553] env[62627]: DEBUG oslo_vmware.api [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194348, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.718331] env[62627]: DEBUG oslo_concurrency.lockutils [req-a8ac1355-206f-46f5-92cb-f7cc0a129112 req-e18a0031-32b6-4d85-abe7-85396c9e6412 service nova] Releasing lock "refresh_cache-1129349d-65db-4e1d-bb08-cea793adf996" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 868.718679] env[62627]: DEBUG nova.compute.manager [req-a8ac1355-206f-46f5-92cb-f7cc0a129112 req-e18a0031-32b6-4d85-abe7-85396c9e6412 service nova] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Received event network-vif-plugged-202d765c-7217-40e5-9216-6ffe27003834 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 868.718937] env[62627]: DEBUG oslo_concurrency.lockutils [req-a8ac1355-206f-46f5-92cb-f7cc0a129112 req-e18a0031-32b6-4d85-abe7-85396c9e6412 service nova] Acquiring lock "a12855e2-1544-4e85-a7ff-c026fec861b6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 868.719200] env[62627]: DEBUG oslo_concurrency.lockutils [req-a8ac1355-206f-46f5-92cb-f7cc0a129112 req-e18a0031-32b6-4d85-abe7-85396c9e6412 service nova] Lock "a12855e2-1544-4e85-a7ff-c026fec861b6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 868.719377] env[62627]: DEBUG oslo_concurrency.lockutils [req-a8ac1355-206f-46f5-92cb-f7cc0a129112 req-e18a0031-32b6-4d85-abe7-85396c9e6412 service nova] Lock "a12855e2-1544-4e85-a7ff-c026fec861b6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 868.719564] env[62627]: DEBUG nova.compute.manager [req-a8ac1355-206f-46f5-92cb-f7cc0a129112 req-e18a0031-32b6-4d85-abe7-85396c9e6412 service nova] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] No waiting events found dispatching network-vif-plugged-202d765c-7217-40e5-9216-6ffe27003834 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 868.719712] env[62627]: WARNING nova.compute.manager [req-a8ac1355-206f-46f5-92cb-f7cc0a129112 req-e18a0031-32b6-4d85-abe7-85396c9e6412 service nova] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Received unexpected event network-vif-plugged-202d765c-7217-40e5-9216-6ffe27003834 for instance with vm_state active and task_state None. [ 868.719890] env[62627]: DEBUG nova.compute.manager [req-a8ac1355-206f-46f5-92cb-f7cc0a129112 req-e18a0031-32b6-4d85-abe7-85396c9e6412 service nova] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Received event network-changed-202d765c-7217-40e5-9216-6ffe27003834 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 868.720071] env[62627]: DEBUG nova.compute.manager [req-a8ac1355-206f-46f5-92cb-f7cc0a129112 req-e18a0031-32b6-4d85-abe7-85396c9e6412 service nova] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Refreshing instance network info cache due to event network-changed-202d765c-7217-40e5-9216-6ffe27003834. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 868.720242] env[62627]: DEBUG oslo_concurrency.lockutils [req-a8ac1355-206f-46f5-92cb-f7cc0a129112 req-e18a0031-32b6-4d85-abe7-85396c9e6412 service nova] Acquiring lock "refresh_cache-a12855e2-1544-4e85-a7ff-c026fec861b6" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 868.720391] env[62627]: DEBUG oslo_concurrency.lockutils [req-a8ac1355-206f-46f5-92cb-f7cc0a129112 req-e18a0031-32b6-4d85-abe7-85396c9e6412 service nova] Acquired lock "refresh_cache-a12855e2-1544-4e85-a7ff-c026fec861b6" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.720610] env[62627]: DEBUG nova.network.neutron [req-a8ac1355-206f-46f5-92cb-f7cc0a129112 req-e18a0031-32b6-4d85-abe7-85396c9e6412 service nova] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Refreshing network info cache for port 202d765c-7217-40e5-9216-6ffe27003834 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 868.745377] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 868.830134] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6d114839-7f37-4a75-b9e2-92645243fb35 tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Acquiring lock "refresh_cache-d28285ed-2ec0-43a4-bd34-82d33816fea4" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 868.830362] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6d114839-7f37-4a75-b9e2-92645243fb35 tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Acquired lock "refresh_cache-d28285ed-2ec0-43a4-bd34-82d33816fea4" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.830546] env[62627]: DEBUG nova.network.neutron [None req-6d114839-7f37-4a75-b9e2-92645243fb35 tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 868.899585] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 868.995397] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Releasing lock "refresh_cache-4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8" {{(pid=62627) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 868.995762] env[62627]: DEBUG nova.compute.manager [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8] Instance network_info: |[{"id": "4eb23b03-8a91-47c9-8220-f878ec2488f3", "address": "fa:16:3e:1b:e0:9d", "network": {"id": "c8f2b47a-fb3f-4fb5-a55a-d527bd4b6f03", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-2107012038-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2e06a7550ef84edd9861cafa0bfcb857", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3117b312-701b-4439-b197-96b6c5cdca89", "external-id": "nsx-vlan-transportzone-94", "segmentation_id": 94, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4eb23b03-8a", "ovs_interfaceid": "4eb23b03-8a91-47c9-8220-f878ec2488f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 868.996727] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1b:e0:9d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3117b312-701b-4439-b197-96b6c5cdca89', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4eb23b03-8a91-47c9-8220-f878ec2488f3', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 869.004432] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Creating folder: Project (2e06a7550ef84edd9861cafa0bfcb857). Parent ref: group-v447541. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 869.004700] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f3ef7d49-1573-4d06-90b6-1eaf24eed3b4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.020549] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Created folder: Project (2e06a7550ef84edd9861cafa0bfcb857) in parent group-v447541. [ 869.020549] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Creating folder: Instances. 
Parent ref: group-v447654. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 869.020698] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d224eec4-af1d-49fa-9bc7-a6c14d7ec5fd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.033646] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Created folder: Instances in parent group-v447654. [ 869.033646] env[62627]: DEBUG oslo.service.loopingcall [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 869.033878] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 869.033926] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9bbaf478-40c0-434e-be2a-42bc2e26e0af {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.055903] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 869.055903] env[62627]: value = "task-2194351" [ 869.055903] env[62627]: _type = "Task" [ 869.055903] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.064398] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194351, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.147787] env[62627]: DEBUG oslo_vmware.api [None req-647f694d-9af8-4055-9905-bd3791563680 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2194347, 'name': ReconfigVM_Task, 'duration_secs': 1.130436} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.148342] env[62627]: DEBUG oslo_concurrency.lockutils [None req-647f694d-9af8-4055-9905-bd3791563680 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Releasing lock "a12855e2-1544-4e85-a7ff-c026fec861b6" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 869.148570] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-647f694d-9af8-4055-9905-bd3791563680 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Reconfigured VM to attach interface {{(pid=62627) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 869.208088] env[62627]: DEBUG oslo_vmware.api [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194348, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.202818} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.208468] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 869.209203] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0350cb21-b6ee-4f68-a925-18d268fdd66d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.236511] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] Reconfiguring VM instance instance-00000027 to attach disk [datastore2] 1129349d-65db-4e1d-bb08-cea793adf996/1129349d-65db-4e1d-bb08-cea793adf996.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 869.239366] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b2d85f33-a2a5-4c35-a56f-36e6d99d7d61 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.258636] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance c9088ac9-456b-4c92-8773-79c263e7cc2a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 869.265246] env[62627]: DEBUG oslo_vmware.api [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Waiting for the task: (returnval){ [ 869.265246] env[62627]: value = "task-2194352" [ 869.265246] env[62627]: _type = "Task" [ 869.265246] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.277025] env[62627]: DEBUG oslo_vmware.api [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194352, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.423468] env[62627]: DEBUG nova.compute.manager [None req-802f837f-ac55-4bd1-9a7d-149642b4d044 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: aed62160-2db5-4008-a491-2b27ae81ef5f] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 869.425481] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cb5bc8e-e74a-49a8-bd80-0571b7e37a55 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.570284] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194351, 'name': CreateVM_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.653508] env[62627]: DEBUG oslo_concurrency.lockutils [None req-647f694d-9af8-4055-9905-bd3791563680 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lock "interface-a12855e2-1544-4e85-a7ff-c026fec861b6-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 8.327s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 869.718625] env[62627]: DEBUG nova.network.neutron [req-a8ac1355-206f-46f5-92cb-f7cc0a129112 req-e18a0031-32b6-4d85-abe7-85396c9e6412 service nova] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Updated VIF entry in instance network info cache for port 202d765c-7217-40e5-9216-6ffe27003834. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 869.719041] env[62627]: DEBUG nova.network.neutron [req-a8ac1355-206f-46f5-92cb-f7cc0a129112 req-e18a0031-32b6-4d85-abe7-85396c9e6412 service nova] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Updating instance_info_cache with network_info: [{"id": "21d30d49-10fb-462d-9b5f-7525c3497178", "address": "fa:16:3e:9b:65:45", "network": {"id": "a1c5c227-8f1f-4881-a6d8-f446b64d9846", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1052922661-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.251", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e4f7ad3ebcf4c4d97bdbce58eec5a09", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "023d6500-887e-4dc4-bec5-06b40450d9c0", "external-id": "nsx-vlan-transportzone-108", "segmentation_id": 108, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap21d30d49-10", "ovs_interfaceid": "21d30d49-10fb-462d-9b5f-7525c3497178", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "202d765c-7217-40e5-9216-6ffe27003834", "address": "fa:16:3e:ee:de:71", "network": {"id": "a1c5c227-8f1f-4881-a6d8-f446b64d9846", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1052922661-network", "subnets": [{"cidr": 
"192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e4f7ad3ebcf4c4d97bdbce58eec5a09", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "023d6500-887e-4dc4-bec5-06b40450d9c0", "external-id": "nsx-vlan-transportzone-108", "segmentation_id": 108, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap202d765c-72", "ovs_interfaceid": "202d765c-7217-40e5-9216-6ffe27003834", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 869.758604] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4df07c0a-12f0-413c-90d3-aaa61c1e559a tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Acquiring lock "db8ddb7b-b119-415c-98d9-dc5b3d2868b7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 869.758847] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4df07c0a-12f0-413c-90d3-aaa61c1e559a tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Lock "db8ddb7b-b119-415c-98d9-dc5b3d2868b7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 869.759083] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4df07c0a-12f0-413c-90d3-aaa61c1e559a tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Acquiring lock "db8ddb7b-b119-415c-98d9-dc5b3d2868b7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 869.759287] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4df07c0a-12f0-413c-90d3-aaa61c1e559a tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Lock "db8ddb7b-b119-415c-98d9-dc5b3d2868b7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 869.759457] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4df07c0a-12f0-413c-90d3-aaa61c1e559a tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Lock "db8ddb7b-b119-415c-98d9-dc5b3d2868b7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 869.761307] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 
7804914c-7df8-45c2-a934-67aa73afb2e2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 869.762580] env[62627]: INFO nova.compute.manager [None req-4df07c0a-12f0-413c-90d3-aaa61c1e559a tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Terminating instance [ 869.776642] env[62627]: DEBUG oslo_vmware.api [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194352, 'name': ReconfigVM_Task, 'duration_secs': 0.383617} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.777792] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] Reconfigured VM instance instance-00000027 to attach disk [datastore2] 1129349d-65db-4e1d-bb08-cea793adf996/1129349d-65db-4e1d-bb08-cea793adf996.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 869.778431] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-490c705b-e2dd-4c26-ba1d-741e13f99130 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.786853] env[62627]: DEBUG oslo_vmware.api [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Waiting for the task: (returnval){ [ 869.786853] env[62627]: value = "task-2194353" [ 869.786853] env[62627]: _type = "Task" [ 869.786853] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.797696] env[62627]: DEBUG nova.network.neutron [None req-6d114839-7f37-4a75-b9e2-92645243fb35 tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] Updating instance_info_cache with network_info: [{"id": "bf72690a-0358-469d-8d21-85ef459cbcb9", "address": "fa:16:3e:16:7f:81", "network": {"id": "d75e7b13-f4ab-4626-9799-4970d691f831", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-781733128-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca351451d2344fc1872f6faf89412354", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c5f8c7b-74a0-43f7-87b4-3c38dd26ac3e", "external-id": "nsx-vlan-transportzone-988", "segmentation_id": 988, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbf72690a-03", "ovs_interfaceid": "bf72690a-0358-469d-8d21-85ef459cbcb9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 869.804217] env[62627]: DEBUG oslo_vmware.api [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194353, 'name': Rename_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.897530] env[62627]: DEBUG nova.compute.manager [None req-ca8fb86f-648a-465c-a580-12f0f3c05eb8 tempest-ServerExternalEventsTest-1108718225 tempest-ServerExternalEventsTest-1108718225-project] [instance: 5f0240da-bec1-4bf3-a975-6ed3d0630a2d] Received event network-changed {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 869.897725] env[62627]: DEBUG nova.compute.manager [None req-ca8fb86f-648a-465c-a580-12f0f3c05eb8 tempest-ServerExternalEventsTest-1108718225 tempest-ServerExternalEventsTest-1108718225-project] [instance: 5f0240da-bec1-4bf3-a975-6ed3d0630a2d] Refreshing instance network info cache due to event network-changed. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 869.897937] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ca8fb86f-648a-465c-a580-12f0f3c05eb8 tempest-ServerExternalEventsTest-1108718225 tempest-ServerExternalEventsTest-1108718225-project] Acquiring lock "refresh_cache-5f0240da-bec1-4bf3-a975-6ed3d0630a2d" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 869.898097] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ca8fb86f-648a-465c-a580-12f0f3c05eb8 tempest-ServerExternalEventsTest-1108718225 tempest-ServerExternalEventsTest-1108718225-project] Acquired lock "refresh_cache-5f0240da-bec1-4bf3-a975-6ed3d0630a2d" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 869.898264] env[62627]: DEBUG nova.network.neutron [None req-ca8fb86f-648a-465c-a580-12f0f3c05eb8 tempest-ServerExternalEventsTest-1108718225 tempest-ServerExternalEventsTest-1108718225-project] [instance: 5f0240da-bec1-4bf3-a975-6ed3d0630a2d] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 869.945666] env[62627]: INFO nova.compute.manager [None req-802f837f-ac55-4bd1-9a7d-149642b4d044 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: aed62160-2db5-4008-a491-2b27ae81ef5f] instance snapshotting [ 869.948499] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fec16742-5150-411c-87a7-e3031f651ddb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.969281] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2eabd22-15dd-4d71-8dc1-bebe6b8e6be0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.067159] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194351, 'name': CreateVM_Task, 'duration_secs': 0.602588} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.067342] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 870.068047] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 870.068220] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 870.068547] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 870.068801] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-854ef977-7dd3-4594-8019-ab8b4fff86e3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.073915] env[62627]: DEBUG oslo_vmware.api [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Waiting for the task: (returnval){ [ 870.073915] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5257e6fe-1146-105e-77a9-b493c1c31236" [ 870.073915] env[62627]: _type = "Task" [ 870.073915] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.082150] env[62627]: DEBUG oslo_vmware.api [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5257e6fe-1146-105e-77a9-b493c1c31236, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.201902] env[62627]: DEBUG nova.compute.manager [req-943d3b80-8416-4e8a-8d94-01a65f02c2ab req-9d8153e3-979e-4756-b927-45f79629ad6a service nova] [instance: 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8] Received event network-vif-plugged-4eb23b03-8a91-47c9-8220-f878ec2488f3 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 870.202242] env[62627]: DEBUG oslo_concurrency.lockutils [req-943d3b80-8416-4e8a-8d94-01a65f02c2ab req-9d8153e3-979e-4756-b927-45f79629ad6a service nova] Acquiring lock "4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 870.202436] env[62627]: DEBUG oslo_concurrency.lockutils [req-943d3b80-8416-4e8a-8d94-01a65f02c2ab req-9d8153e3-979e-4756-b927-45f79629ad6a service nova] Lock "4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 870.202610] env[62627]: DEBUG oslo_concurrency.lockutils [req-943d3b80-8416-4e8a-8d94-01a65f02c2ab req-9d8153e3-979e-4756-b927-45f79629ad6a service nova] Lock "4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 870.202776] env[62627]: DEBUG nova.compute.manager [req-943d3b80-8416-4e8a-8d94-01a65f02c2ab req-9d8153e3-979e-4756-b927-45f79629ad6a service nova] [instance: 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8] No waiting events found dispatching network-vif-plugged-4eb23b03-8a91-47c9-8220-f878ec2488f3 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 870.202998] env[62627]: WARNING nova.compute.manager [req-943d3b80-8416-4e8a-8d94-01a65f02c2ab req-9d8153e3-979e-4756-b927-45f79629ad6a service nova] [instance: 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8] Received unexpected event network-vif-plugged-4eb23b03-8a91-47c9-8220-f878ec2488f3 for instance with vm_state building and task_state spawning. [ 870.203095] env[62627]: DEBUG nova.compute.manager [req-943d3b80-8416-4e8a-8d94-01a65f02c2ab req-9d8153e3-979e-4756-b927-45f79629ad6a service nova] [instance: 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8] Received event network-changed-4eb23b03-8a91-47c9-8220-f878ec2488f3 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 870.203285] env[62627]: DEBUG nova.compute.manager [req-943d3b80-8416-4e8a-8d94-01a65f02c2ab req-9d8153e3-979e-4756-b927-45f79629ad6a service nova] [instance: 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8] Refreshing instance network info cache due to event network-changed-4eb23b03-8a91-47c9-8220-f878ec2488f3. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 870.203475] env[62627]: DEBUG oslo_concurrency.lockutils [req-943d3b80-8416-4e8a-8d94-01a65f02c2ab req-9d8153e3-979e-4756-b927-45f79629ad6a service nova] Acquiring lock "refresh_cache-4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 870.203608] env[62627]: DEBUG oslo_concurrency.lockutils [req-943d3b80-8416-4e8a-8d94-01a65f02c2ab req-9d8153e3-979e-4756-b927-45f79629ad6a service nova] Acquired lock "refresh_cache-4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 870.203765] env[62627]: DEBUG nova.network.neutron [req-943d3b80-8416-4e8a-8d94-01a65f02c2ab req-9d8153e3-979e-4756-b927-45f79629ad6a service nova] [instance: 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8] Refreshing network info cache for port 4eb23b03-8a91-47c9-8220-f878ec2488f3 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 870.225314] env[62627]: DEBUG oslo_concurrency.lockutils [req-a8ac1355-206f-46f5-92cb-f7cc0a129112 req-e18a0031-32b6-4d85-abe7-85396c9e6412 service nova] Releasing lock "refresh_cache-a12855e2-1544-4e85-a7ff-c026fec861b6" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 870.225639] env[62627]: DEBUG nova.compute.manager [req-a8ac1355-206f-46f5-92cb-f7cc0a129112 req-e18a0031-32b6-4d85-abe7-85396c9e6412 service nova] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] Received event network-changed-bf72690a-0358-469d-8d21-85ef459cbcb9 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 870.225785] env[62627]: DEBUG nova.compute.manager [req-a8ac1355-206f-46f5-92cb-f7cc0a129112 req-e18a0031-32b6-4d85-abe7-85396c9e6412 service nova] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] Refreshing instance network info cache due to event network-changed-bf72690a-0358-469d-8d21-85ef459cbcb9. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 870.226010] env[62627]: DEBUG oslo_concurrency.lockutils [req-a8ac1355-206f-46f5-92cb-f7cc0a129112 req-e18a0031-32b6-4d85-abe7-85396c9e6412 service nova] Acquiring lock "refresh_cache-d28285ed-2ec0-43a4-bd34-82d33816fea4" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 870.265267] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance d0786233-087b-4000-8c37-fd5c55cc0ef4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 870.265595] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Total usable vcpus: 48, total allocated vcpus: 18 {{(pid=62627) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 870.265745] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=4032MB phys_disk=200GB used_disk=18GB total_vcpus=48 used_vcpus=18 pci_stats=[] {{(pid=62627) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 870.268716] env[62627]: DEBUG nova.compute.manager [None req-4df07c0a-12f0-413c-90d3-aaa61c1e559a tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Start destroying the instance on the hypervisor. {{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 870.269049] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-4df07c0a-12f0-413c-90d3-aaa61c1e559a tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 870.269827] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b985e4a-8434-4048-acd0-312cef185c6a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.279212] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-4df07c0a-12f0-413c-90d3-aaa61c1e559a tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 870.279459] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-eb954e52-f223-4c1a-883f-a245adc51c52 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.286398] env[62627]: DEBUG oslo_vmware.api [None req-4df07c0a-12f0-413c-90d3-aaa61c1e559a tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Waiting for the task: (returnval){ [ 870.286398] env[62627]: value = "task-2194354" [ 870.286398] env[62627]: _type = "Task" [ 870.286398] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.302645] env[62627]: DEBUG oslo_vmware.api [None req-4df07c0a-12f0-413c-90d3-aaa61c1e559a tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Task: {'id': task-2194354, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.305944] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6d114839-7f37-4a75-b9e2-92645243fb35 tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Releasing lock "refresh_cache-d28285ed-2ec0-43a4-bd34-82d33816fea4" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 870.307207] env[62627]: DEBUG oslo_vmware.api [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194353, 'name': Rename_Task, 'duration_secs': 0.22173} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.309589] env[62627]: DEBUG oslo_concurrency.lockutils [req-a8ac1355-206f-46f5-92cb-f7cc0a129112 req-e18a0031-32b6-4d85-abe7-85396c9e6412 service nova] Acquired lock "refresh_cache-d28285ed-2ec0-43a4-bd34-82d33816fea4" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 870.309782] env[62627]: DEBUG nova.network.neutron [req-a8ac1355-206f-46f5-92cb-f7cc0a129112 req-e18a0031-32b6-4d85-abe7-85396c9e6412 service nova] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] Refreshing network info cache for port bf72690a-0358-469d-8d21-85ef459cbcb9 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 870.310916] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 870.311531] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-202e51ea-2f33-4d57-bafa-919b62a41dbe {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.319704] env[62627]: DEBUG oslo_vmware.api [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Waiting for the task: (returnval){ [ 870.319704] env[62627]: value = "task-2194355" [ 870.319704] env[62627]: _type = "Task" [ 870.319704] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.332151] env[62627]: DEBUG oslo_vmware.api [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194355, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.485320] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-802f837f-ac55-4bd1-9a7d-149642b4d044 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: aed62160-2db5-4008-a491-2b27ae81ef5f] Creating Snapshot of the VM instance {{(pid=62627) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 870.487722] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-df9759c7-b147-46ce-bfe6-21f9f169d58c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.500526] env[62627]: DEBUG oslo_vmware.api [None req-802f837f-ac55-4bd1-9a7d-149642b4d044 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Waiting for the task: (returnval){ [ 870.500526] env[62627]: value = "task-2194356" [ 870.500526] env[62627]: _type = "Task" [ 870.500526] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.511937] env[62627]: DEBUG oslo_vmware.api [None req-802f837f-ac55-4bd1-9a7d-149642b4d044 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194356, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.592028] env[62627]: DEBUG oslo_vmware.api [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5257e6fe-1146-105e-77a9-b493c1c31236, 'name': SearchDatastore_Task, 'duration_secs': 0.042523} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.593484] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 870.593818] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 870.594203] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 870.594395] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 870.594634] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 870.595185] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d736c58c-9532-4c36-aace-bd83bb6c7cdc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.611994] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 870.612360] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 870.613394] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-af23b0e8-9891-4173-9835-d143747c3c87 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.619719] env[62627]: DEBUG oslo_vmware.api [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Waiting for the task: (returnval){ [ 870.619719] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]529edd1b-2484-952c-c09d-786a3438dc0c" [ 870.619719] env[62627]: _type = "Task" [ 870.619719] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.631905] env[62627]: DEBUG oslo_vmware.api [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]529edd1b-2484-952c-c09d-786a3438dc0c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.762951] env[62627]: DEBUG nova.network.neutron [None req-ca8fb86f-648a-465c-a580-12f0f3c05eb8 tempest-ServerExternalEventsTest-1108718225 tempest-ServerExternalEventsTest-1108718225-project] [instance: 5f0240da-bec1-4bf3-a975-6ed3d0630a2d] Updating instance_info_cache with network_info: [{"id": "2807cd3b-e852-4c5c-bd4c-409c4d5c7e2f", "address": "fa:16:3e:17:96:86", "network": {"id": "bd876f46-2bdf-4196-b718-c36f7e15f150", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.39", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ccb0cafa1a904d578ec4998b386bebf7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae1d6a8-cbba-4bbf-af10-ba5467340475", "external-id": "nsx-vlan-transportzone-271", "segmentation_id": 271, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2807cd3b-e8", "ovs_interfaceid": "2807cd3b-e852-4c5c-bd4c-409c4d5c7e2f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 870.796879] env[62627]: DEBUG oslo_vmware.api [None req-4df07c0a-12f0-413c-90d3-aaa61c1e559a tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Task: {'id': task-2194354, 'name': PowerOffVM_Task, 'duration_secs': 0.318775} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.798058] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-4df07c0a-12f0-413c-90d3-aaa61c1e559a tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 870.798252] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-4df07c0a-12f0-413c-90d3-aaa61c1e559a tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 870.799025] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13ea8a09-4533-47c8-baac-b86be0772f13 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.801476] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ee948967-1764-4093-9a93-4626a2b19703 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.809133] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-252d4383-6c2e-4845-8822-af3a5c5cc78b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.816266] env[62627]: DEBUG nova.compute.manager [None req-6d114839-7f37-4a75-b9e2-92645243fb35 tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 870.817369] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1788176-e9b8-4a61-8fed-d5db10764bc6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.855699] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76542697-873a-43f5-8462-12a56ec9a454 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.872468] env[62627]: DEBUG oslo_vmware.api [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194355, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.879597] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4521595-b2c0-4da4-9da6-856528055df9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.881999] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-4df07c0a-12f0-413c-90d3-aaa61c1e559a tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 870.882243] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-4df07c0a-12f0-413c-90d3-aaa61c1e559a tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 870.882440] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-4df07c0a-12f0-413c-90d3-aaa61c1e559a tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Deleting the datastore file [datastore2] db8ddb7b-b119-415c-98d9-dc5b3d2868b7 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 870.883130] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-781ea6e9-e623-4c3d-8f69-35743f9a9790 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.897083] env[62627]: DEBUG nova.compute.provider_tree [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 870.902470] env[62627]: DEBUG oslo_vmware.api [None req-4df07c0a-12f0-413c-90d3-aaa61c1e559a tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Waiting for the task: (returnval){ [ 870.902470] env[62627]: value = "task-2194358" [ 870.902470] env[62627]: _type = "Task" [ 870.902470] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.912283] env[62627]: DEBUG oslo_vmware.api [None req-4df07c0a-12f0-413c-90d3-aaa61c1e559a tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Task: {'id': task-2194358, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.012424] env[62627]: DEBUG oslo_vmware.api [None req-802f837f-ac55-4bd1-9a7d-149642b4d044 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194356, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.133024] env[62627]: DEBUG oslo_vmware.api [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]529edd1b-2484-952c-c09d-786a3438dc0c, 'name': SearchDatastore_Task, 'duration_secs': 0.01202} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.133024] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-65e53192-d9e2-4f0d-af88-db194552d4d5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.138557] env[62627]: DEBUG oslo_vmware.api [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Waiting for the task: (returnval){ [ 871.138557] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52af0f68-a226-bcb1-169c-5219c2e2abaf" [ 871.138557] env[62627]: _type = "Task" [ 871.138557] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.150020] env[62627]: DEBUG oslo_vmware.api [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52af0f68-a226-bcb1-169c-5219c2e2abaf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.265665] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ca8fb86f-648a-465c-a580-12f0f3c05eb8 tempest-ServerExternalEventsTest-1108718225 tempest-ServerExternalEventsTest-1108718225-project] Releasing lock "refresh_cache-5f0240da-bec1-4bf3-a975-6ed3d0630a2d" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 871.274431] env[62627]: DEBUG oslo_concurrency.lockutils [None req-67ae8d3c-a51a-44e1-87e6-08c82ce6c4f0 tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Acquiring lock "5f0240da-bec1-4bf3-a975-6ed3d0630a2d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 871.274431] env[62627]: DEBUG oslo_concurrency.lockutils [None req-67ae8d3c-a51a-44e1-87e6-08c82ce6c4f0 tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Lock "5f0240da-bec1-4bf3-a975-6ed3d0630a2d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 871.274636] env[62627]: DEBUG oslo_concurrency.lockutils [None req-67ae8d3c-a51a-44e1-87e6-08c82ce6c4f0 tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Acquiring lock "5f0240da-bec1-4bf3-a975-6ed3d0630a2d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 871.275012] env[62627]: DEBUG oslo_concurrency.lockutils [None req-67ae8d3c-a51a-44e1-87e6-08c82ce6c4f0 tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Lock "5f0240da-bec1-4bf3-a975-6ed3d0630a2d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 871.275931] env[62627]: DEBUG oslo_concurrency.lockutils [None req-67ae8d3c-a51a-44e1-87e6-08c82ce6c4f0 tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Lock "5f0240da-bec1-4bf3-a975-6ed3d0630a2d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 871.278031] env[62627]: INFO nova.compute.manager [None req-67ae8d3c-a51a-44e1-87e6-08c82ce6c4f0 tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] [instance: 5f0240da-bec1-4bf3-a975-6ed3d0630a2d] Terminating instance [ 871.365117] env[62627]: DEBUG oslo_vmware.api [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194355, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.396747] env[62627]: DEBUG nova.network.neutron [req-943d3b80-8416-4e8a-8d94-01a65f02c2ab req-9d8153e3-979e-4756-b927-45f79629ad6a service nova] [instance: 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8] Updated VIF entry in instance network info cache for port 4eb23b03-8a91-47c9-8220-f878ec2488f3. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 871.397288] env[62627]: DEBUG nova.network.neutron [req-943d3b80-8416-4e8a-8d94-01a65f02c2ab req-9d8153e3-979e-4756-b927-45f79629ad6a service nova] [instance: 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8] Updating instance_info_cache with network_info: [{"id": "4eb23b03-8a91-47c9-8220-f878ec2488f3", "address": "fa:16:3e:1b:e0:9d", "network": {"id": "c8f2b47a-fb3f-4fb5-a55a-d527bd4b6f03", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-2107012038-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2e06a7550ef84edd9861cafa0bfcb857", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3117b312-701b-4439-b197-96b6c5cdca89", "external-id": "nsx-vlan-transportzone-94", "segmentation_id": 94, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4eb23b03-8a", "ovs_interfaceid": "4eb23b03-8a91-47c9-8220-f878ec2488f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 871.404141] env[62627]: DEBUG nova.scheduler.client.report [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 871.421242] env[62627]: DEBUG oslo_vmware.api [None req-4df07c0a-12f0-413c-90d3-aaa61c1e559a tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Task: {'id': task-2194358, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.144276} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.422171] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-4df07c0a-12f0-413c-90d3-aaa61c1e559a tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 871.422557] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-4df07c0a-12f0-413c-90d3-aaa61c1e559a tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 871.422841] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-4df07c0a-12f0-413c-90d3-aaa61c1e559a tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 871.423128] env[62627]: INFO nova.compute.manager [None req-4df07c0a-12f0-413c-90d3-aaa61c1e559a tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Took 1.15 seconds to destroy the instance on the hypervisor. [ 871.423459] env[62627]: DEBUG oslo.service.loopingcall [None req-4df07c0a-12f0-413c-90d3-aaa61c1e559a tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 871.424403] env[62627]: DEBUG nova.compute.manager [-] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 871.424600] env[62627]: DEBUG nova.network.neutron [-] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 871.473470] env[62627]: DEBUG nova.network.neutron [req-a8ac1355-206f-46f5-92cb-f7cc0a129112 req-e18a0031-32b6-4d85-abe7-85396c9e6412 service nova] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] Updated VIF entry in instance network info cache for port bf72690a-0358-469d-8d21-85ef459cbcb9. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 871.473470] env[62627]: DEBUG nova.network.neutron [req-a8ac1355-206f-46f5-92cb-f7cc0a129112 req-e18a0031-32b6-4d85-abe7-85396c9e6412 service nova] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] Updating instance_info_cache with network_info: [{"id": "bf72690a-0358-469d-8d21-85ef459cbcb9", "address": "fa:16:3e:16:7f:81", "network": {"id": "d75e7b13-f4ab-4626-9799-4970d691f831", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-781733128-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca351451d2344fc1872f6faf89412354", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c5f8c7b-74a0-43f7-87b4-3c38dd26ac3e", "external-id": "nsx-vlan-transportzone-988", "segmentation_id": 988, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbf72690a-03", "ovs_interfaceid": "bf72690a-0358-469d-8d21-85ef459cbcb9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 871.511014] env[62627]: DEBUG oslo_vmware.api [None req-802f837f-ac55-4bd1-9a7d-149642b4d044 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194356, 'name': CreateSnapshot_Task, 'duration_secs': 0.716384} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.511290] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-802f837f-ac55-4bd1-9a7d-149642b4d044 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: aed62160-2db5-4008-a491-2b27ae81ef5f] Created Snapshot of the VM instance {{(pid=62627) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 871.512075] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eb6b66f-6ccb-4c7b-9878-e6b8945583be {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.650176] env[62627]: DEBUG oslo_vmware.api [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52af0f68-a226-bcb1-169c-5219c2e2abaf, 'name': SearchDatastore_Task, 'duration_secs': 0.01164} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.650501] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 871.650797] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore1] 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8/4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 871.651149] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-546a816c-15f8-4aa6-be71-52f3639e070f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.660065] env[62627]: DEBUG oslo_vmware.api [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Waiting for the task: (returnval){ [ 871.660065] env[62627]: value = "task-2194359" [ 871.660065] env[62627]: _type = "Task" [ 871.660065] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.670591] env[62627]: DEBUG oslo_vmware.api [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194359, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.781631] env[62627]: DEBUG nova.compute.manager [None req-67ae8d3c-a51a-44e1-87e6-08c82ce6c4f0 tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] [instance: 5f0240da-bec1-4bf3-a975-6ed3d0630a2d] Start destroying the instance on the hypervisor. 
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 871.781848] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-67ae8d3c-a51a-44e1-87e6-08c82ce6c4f0 tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] [instance: 5f0240da-bec1-4bf3-a975-6ed3d0630a2d] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 871.782801] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a9ab8be-2e78-429f-ad28-4616768294d2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.791673] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-67ae8d3c-a51a-44e1-87e6-08c82ce6c4f0 tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] [instance: 5f0240da-bec1-4bf3-a975-6ed3d0630a2d] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 871.791673] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-01bc6ad7-c98e-47be-ae89-4d2bd58a61f5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.799106] env[62627]: DEBUG oslo_vmware.api [None req-67ae8d3c-a51a-44e1-87e6-08c82ce6c4f0 tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Waiting for the task: (returnval){ [ 871.799106] env[62627]: value = "task-2194360" [ 871.799106] env[62627]: _type = "Task" [ 871.799106] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.820015] env[62627]: DEBUG oslo_vmware.api [None req-67ae8d3c-a51a-44e1-87e6-08c82ce6c4f0 tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Task: {'id': task-2194360, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.868212] env[62627]: DEBUG oslo_vmware.api [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194355, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.879620] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fd71631-f679-4bbe-baf4-3a4a64ef4b4d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.890709] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-6d114839-7f37-4a75-b9e2-92645243fb35 tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] Doing hard reboot of VM {{(pid=62627) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 871.890759] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-8d260df3-4d4a-44e1-95ba-03853a36aa83 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.902481] env[62627]: DEBUG oslo_concurrency.lockutils [req-943d3b80-8416-4e8a-8d94-01a65f02c2ab req-9d8153e3-979e-4756-b927-45f79629ad6a service nova] Releasing lock "refresh_cache-4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 871.903021] env[62627]: DEBUG oslo_vmware.api [None req-6d114839-7f37-4a75-b9e2-92645243fb35 tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Waiting for the task: (returnval){ [ 871.903021] env[62627]: value = "task-2194361" [ 871.903021] env[62627]: _type = "Task" [ 871.903021] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.911191] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62627) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 871.911191] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 7.754s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 871.911191] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cc6eb1f9-32f9-485d-9db7-a375a64b1299 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 42.039s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 871.911191] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cc6eb1f9-32f9-485d-9db7-a375a64b1299 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 871.913118] env[62627]: DEBUG oslo_concurrency.lockutils [None req-283edade-b2c2-46cb-aeec-7eeafacf7be6 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Lock "compute_resources" 
acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 39.906s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 871.913254] env[62627]: DEBUG oslo_concurrency.lockutils [None req-283edade-b2c2-46cb-aeec-7eeafacf7be6 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 871.916397] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d2df91bd-7efc-46d9-9c39-f6d0d85eee8d tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.605s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 871.916697] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d2df91bd-7efc-46d9-9c39-f6d0d85eee8d tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 871.919420] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 37.961s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 871.920832] env[62627]: INFO nova.compute.claims [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 871.930426] env[62627]: DEBUG oslo_vmware.api [None req-6d114839-7f37-4a75-b9e2-92645243fb35 tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Task: {'id': task-2194361, 'name': ResetVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.941811] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Acquiring lock "457e5aa6-d229-4f48-8817-e918ec4bd0f9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 871.942388] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Lock "457e5aa6-d229-4f48-8817-e918ec4bd0f9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 871.951415] env[62627]: INFO nova.scheduler.client.report [None req-d2df91bd-7efc-46d9-9c39-f6d0d85eee8d tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Deleted allocations for instance 95ac4fbf-071a-403e-8cae-88d9899757ba [ 871.960475] env[62627]: INFO nova.scheduler.client.report [None req-cc6eb1f9-32f9-485d-9db7-a375a64b1299 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Deleted allocations for instance 76fb58dc-df20-464b-bda0-dcf09d69c718 [ 871.972038] env[62627]: INFO nova.scheduler.client.report [None req-283edade-b2c2-46cb-aeec-7eeafacf7be6 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Deleted allocations for instance f09c685d-9103-4c94-a44a-266aacc334f3 [ 871.977669] env[62627]: DEBUG oslo_concurrency.lockutils [req-a8ac1355-206f-46f5-92cb-f7cc0a129112 req-e18a0031-32b6-4d85-abe7-85396c9e6412 service nova] Releasing lock "refresh_cache-d28285ed-2ec0-43a4-bd34-82d33816fea4" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 872.033238] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-802f837f-ac55-4bd1-9a7d-149642b4d044 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: aed62160-2db5-4008-a491-2b27ae81ef5f] Creating linked-clone VM from snapshot {{(pid=62627) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 872.034195] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-85683910-243d-434c-8ebc-ea2e6a1c681c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.046960] env[62627]: DEBUG oslo_vmware.api [None req-802f837f-ac55-4bd1-9a7d-149642b4d044 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Waiting for the task: (returnval){ [ 872.046960] env[62627]: value = "task-2194362" [ 872.046960] env[62627]: _type = "Task" [ 872.046960] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.059699] env[62627]: DEBUG oslo_vmware.api [None req-802f837f-ac55-4bd1-9a7d-149642b4d044 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194362, 'name': CloneVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.172907] env[62627]: DEBUG oslo_vmware.api [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194359, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.320610] env[62627]: DEBUG oslo_vmware.api [None req-67ae8d3c-a51a-44e1-87e6-08c82ce6c4f0 tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Task: {'id': task-2194360, 'name': PowerOffVM_Task, 'duration_secs': 0.211052} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.320610] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-67ae8d3c-a51a-44e1-87e6-08c82ce6c4f0 tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] [instance: 5f0240da-bec1-4bf3-a975-6ed3d0630a2d] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 872.320610] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-67ae8d3c-a51a-44e1-87e6-08c82ce6c4f0 tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] [instance: 5f0240da-bec1-4bf3-a975-6ed3d0630a2d] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 872.320610] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bdabab1e-7b18-4fb8-87a0-6233572dd5c5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.335267] env[62627]: DEBUG nova.objects.instance [None req-21a35e0b-f9d4-4a7b-9e00-133a788fa523 tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Lazy-loading 'flavor' on Instance uuid faa16f10-63f0-457c-8cd7-20a624454d86 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 872.365259] env[62627]: DEBUG oslo_vmware.api [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194355, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.400642] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-67ae8d3c-a51a-44e1-87e6-08c82ce6c4f0 tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] [instance: 5f0240da-bec1-4bf3-a975-6ed3d0630a2d] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 872.400863] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-67ae8d3c-a51a-44e1-87e6-08c82ce6c4f0 tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] [instance: 5f0240da-bec1-4bf3-a975-6ed3d0630a2d] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 872.401055] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-67ae8d3c-a51a-44e1-87e6-08c82ce6c4f0 tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Deleting the datastore file [datastore2] 5f0240da-bec1-4bf3-a975-6ed3d0630a2d {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 872.401336] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2b10f1a2-8c39-4b52-ba14-8d7b22312e94 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.410173] env[62627]: DEBUG nova.compute.manager [req-691b2ad8-71d2-4cab-abba-77f68fc6c700 req-6c40ddc7-a35a-41b7-ac89-308f731bd2bf service nova] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Received event network-vif-deleted-32dda461-7395-4104-9323-4a39535875de {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 872.410414] env[62627]: INFO nova.compute.manager [req-691b2ad8-71d2-4cab-abba-77f68fc6c700 req-6c40ddc7-a35a-41b7-ac89-308f731bd2bf service nova] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Neutron deleted interface 32dda461-7395-4104-9323-4a39535875de; detaching it from the instance and deleting it from the info cache [ 872.410521] env[62627]: DEBUG nova.network.neutron [req-691b2ad8-71d2-4cab-abba-77f68fc6c700 req-6c40ddc7-a35a-41b7-ac89-308f731bd2bf service nova] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 872.413759] env[62627]: DEBUG oslo_vmware.api [None req-67ae8d3c-a51a-44e1-87e6-08c82ce6c4f0 tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Waiting for the task: (returnval){ [ 872.413759] env[62627]: value = "task-2194364" [ 872.413759] env[62627]: _type = "Task" [ 872.413759] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.421467] env[62627]: DEBUG oslo_vmware.api [None req-6d114839-7f37-4a75-b9e2-92645243fb35 tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Task: {'id': task-2194361, 'name': ResetVM_Task, 'duration_secs': 0.179355} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.422260] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-6d114839-7f37-4a75-b9e2-92645243fb35 tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] Did hard reboot of VM {{(pid=62627) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 872.422473] env[62627]: DEBUG nova.compute.manager [None req-6d114839-7f37-4a75-b9e2-92645243fb35 tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 872.424035] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8703f3c9-f490-460a-b133-bfe5fe4d21b0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.433491] env[62627]: DEBUG oslo_vmware.api [None req-67ae8d3c-a51a-44e1-87e6-08c82ce6c4f0 tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Task: {'id': task-2194364, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.464797] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d2df91bd-7efc-46d9-9c39-f6d0d85eee8d tempest-AttachInterfacesV270Test-1651543906 tempest-AttachInterfacesV270Test-1651543906-project-member] Lock "95ac4fbf-071a-403e-8cae-88d9899757ba" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.582s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 872.468582] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cc6eb1f9-32f9-485d-9db7-a375a64b1299 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Lock "76fb58dc-df20-464b-bda0-dcf09d69c718" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 46.100s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 872.482909] env[62627]: DEBUG oslo_concurrency.lockutils [None req-283edade-b2c2-46cb-aeec-7eeafacf7be6 tempest-ListServersNegativeTestJSON-1492694773 tempest-ListServersNegativeTestJSON-1492694773-project-member] Lock "f09c685d-9103-4c94-a44a-266aacc334f3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 46.294s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 872.559917] env[62627]: DEBUG oslo_vmware.api [None req-802f837f-ac55-4bd1-9a7d-149642b4d044 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194362, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.642458] env[62627]: DEBUG nova.network.neutron [-] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 872.671285] env[62627]: DEBUG oslo_vmware.api [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194359, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.560422} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.671715] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore1] 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8/4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 872.671949] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 872.672263] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6f579a0c-a068-4fed-b516-2d53c44e66e3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.682490] env[62627]: DEBUG oslo_vmware.api [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Waiting for the task: (returnval){ [ 872.682490] env[62627]: value = "task-2194365" [ 872.682490] env[62627]: _type = "Task" [ 872.682490] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.695817] env[62627]: DEBUG oslo_vmware.api [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194365, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.841650] env[62627]: DEBUG oslo_concurrency.lockutils [None req-21a35e0b-f9d4-4a7b-9e00-133a788fa523 tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Acquiring lock "refresh_cache-faa16f10-63f0-457c-8cd7-20a624454d86" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 872.841897] env[62627]: DEBUG oslo_concurrency.lockutils [None req-21a35e0b-f9d4-4a7b-9e00-133a788fa523 tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Acquired lock "refresh_cache-faa16f10-63f0-457c-8cd7-20a624454d86" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 872.866093] env[62627]: DEBUG oslo_vmware.api [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194355, 'name': PowerOnVM_Task, 'duration_secs': 2.048184} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.866658] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 872.866860] env[62627]: INFO nova.compute.manager [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] Took 11.55 seconds to spawn the instance on the hypervisor. [ 872.867254] env[62627]: DEBUG nova.compute.manager [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 872.867852] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0980e4f-2329-4ff7-8149-2daed7ce9fad {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.915550] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-21d6cde3-08d2-4d92-851e-aacf60c3f6c5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.939049] env[62627]: DEBUG oslo_vmware.api [None req-67ae8d3c-a51a-44e1-87e6-08c82ce6c4f0 tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Task: {'id': task-2194364, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.169533} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.945930] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-67ae8d3c-a51a-44e1-87e6-08c82ce6c4f0 tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 872.946669] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-67ae8d3c-a51a-44e1-87e6-08c82ce6c4f0 tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] [instance: 5f0240da-bec1-4bf3-a975-6ed3d0630a2d] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 872.947053] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-67ae8d3c-a51a-44e1-87e6-08c82ce6c4f0 tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] [instance: 5f0240da-bec1-4bf3-a975-6ed3d0630a2d] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 872.947301] env[62627]: INFO nova.compute.manager [None req-67ae8d3c-a51a-44e1-87e6-08c82ce6c4f0 tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] [instance: 5f0240da-bec1-4bf3-a975-6ed3d0630a2d] Took 1.17 seconds to destroy the instance on the hypervisor. [ 872.948185] env[62627]: DEBUG oslo.service.loopingcall [None req-67ae8d3c-a51a-44e1-87e6-08c82ce6c4f0 tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 872.953861] env[62627]: DEBUG nova.compute.manager [-] [instance: 5f0240da-bec1-4bf3-a975-6ed3d0630a2d] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 872.953861] env[62627]: DEBUG nova.network.neutron [-] [instance: 5f0240da-bec1-4bf3-a975-6ed3d0630a2d] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 872.957793] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e49c8d1-a3ee-46f9-a49d-943685debc83 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.971438] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6d114839-7f37-4a75-b9e2-92645243fb35 tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Lock "d28285ed-2ec0-43a4-bd34-82d33816fea4" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.667s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 872.973423] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1cc79297-9247-48f6-a865-8013b5aef85c tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquiring lock "interface-a12855e2-1544-4e85-a7ff-c026fec861b6-202d765c-7217-40e5-9216-6ffe27003834" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 872.973423] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1cc79297-9247-48f6-a865-8013b5aef85c tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lock "interface-a12855e2-1544-4e85-a7ff-c026fec861b6-202d765c-7217-40e5-9216-6ffe27003834" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 873.008603] env[62627]: DEBUG nova.compute.manager [req-691b2ad8-71d2-4cab-abba-77f68fc6c700 req-6c40ddc7-a35a-41b7-ac89-308f731bd2bf service nova] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Detach interface failed, port_id=32dda461-7395-4104-9323-4a39535875de, reason: Instance db8ddb7b-b119-415c-98d9-dc5b3d2868b7 could not be found. {{(pid=62627) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 873.060140] env[62627]: DEBUG oslo_vmware.api [None req-802f837f-ac55-4bd1-9a7d-149642b4d044 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194362, 'name': CloneVM_Task} progress is 94%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.145009] env[62627]: INFO nova.compute.manager [-] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Took 1.72 seconds to deallocate network for instance. 
[ 873.203323] env[62627]: DEBUG oslo_vmware.api [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194365, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068635} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.203323] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 873.205832] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af06758d-ca34-4c48-883d-97adcc0b5e61 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.237122] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8] Reconfiguring VM instance instance-00000028 to attach disk [datastore1] 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8/4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 873.239840] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0451ffb8-7119-4fb0-a725-1a6d1400b5b7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.261801] env[62627]: DEBUG oslo_vmware.api [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Waiting for the task: (returnval){ [ 873.261801] env[62627]: value = "task-2194366" [ 873.261801] env[62627]: _type = "Task" [ 873.261801] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.278935] env[62627]: DEBUG oslo_vmware.api [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194366, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.394298] env[62627]: INFO nova.compute.manager [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] Took 49.14 seconds to build instance. 
[ 873.482370] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1cc79297-9247-48f6-a865-8013b5aef85c tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquiring lock "a12855e2-1544-4e85-a7ff-c026fec861b6" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 873.482587] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1cc79297-9247-48f6-a865-8013b5aef85c tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquired lock "a12855e2-1544-4e85-a7ff-c026fec861b6" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 873.484020] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de9888fa-78a3-4356-a817-26765229487a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.508312] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9dfa30c-91f4-4115-8d0e-a5cee5415495 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.536520] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-1cc79297-9247-48f6-a865-8013b5aef85c tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Reconfiguring VM to detach interface {{(pid=62627) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 873.539689] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-68a34780-e4c6-45aa-8196-b2981389ca8c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.374072] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4df07c0a-12f0-413c-90d3-aaa61c1e559a tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 874.374072] env[62627]: DEBUG oslo_concurrency.lockutils [None req-eb3f36df-1ab0-46c3-8365-53414ad6f4eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Lock "1129349d-65db-4e1d-bb08-cea793adf996" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 102.114s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 874.374072] env[62627]: DEBUG nova.network.neutron [-] [instance: 5f0240da-bec1-4bf3-a975-6ed3d0630a2d] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 874.374072] env[62627]: DEBUG oslo_vmware.api [None req-802f837f-ac55-4bd1-9a7d-149642b4d044 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194362, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.374072] env[62627]: WARNING oslo_vmware.common.loopingcall [None req-802f837f-ac55-4bd1-9a7d-149642b4d044 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] task run outlasted interval by 0.317395 sec [ 874.377701] env[62627]: DEBUG nova.network.neutron [None req-21a35e0b-f9d4-4a7b-9e00-133a788fa523 tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 874.389566] env[62627]: DEBUG oslo_vmware.api [None req-1cc79297-9247-48f6-a865-8013b5aef85c tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Waiting for the task: (returnval){ [ 874.389566] env[62627]: value = "task-2194367" [ 874.389566] env[62627]: _type = "Task" [ 874.389566] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.402038] env[62627]: DEBUG oslo_vmware.api [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194366, 'name': ReconfigVM_Task, 'duration_secs': 0.708808} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.402305] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8] Reconfigured VM instance instance-00000028 to attach disk [datastore1] 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8/4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 874.402786] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bcf839bf-fd52-48b3-884f-aefcb4a3ce7a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.410710] env[62627]: DEBUG oslo_vmware.api [None req-802f837f-ac55-4bd1-9a7d-149642b4d044 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194362, 'name': CloneVM_Task, 'duration_secs': 2.012876} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.411192] env[62627]: DEBUG oslo_vmware.api [None req-1cc79297-9247-48f6-a865-8013b5aef85c tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2194367, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.412817] env[62627]: INFO nova.virt.vmwareapi.vmops [None req-802f837f-ac55-4bd1-9a7d-149642b4d044 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: aed62160-2db5-4008-a491-2b27ae81ef5f] Created linked-clone VM from snapshot [ 874.413374] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-901e3495-4f12-4830-9bcd-c25f74306029 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.416377] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecdd2333-7be4-4c16-aabc-0c710d5da208 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.420865] env[62627]: DEBUG oslo_vmware.api [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Waiting for the task: (returnval){ [ 874.420865] env[62627]: value = "task-2194368" [ 874.420865] env[62627]: _type = "Task" [ 874.420865] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.429258] env[62627]: DEBUG nova.virt.vmwareapi.images [None req-802f837f-ac55-4bd1-9a7d-149642b4d044 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: aed62160-2db5-4008-a491-2b27ae81ef5f] Uploading image b6b3c30e-e15d-434a-aa3a-097b0c9be220 {{(pid=62627) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 874.433512] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61395d1e-f1b3-433b-8345-3334f1d62406 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.439313] env[62627]: DEBUG oslo_vmware.api [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194368, 'name': Rename_Task} progress is 6%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.475276] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8061f735-413c-4e43-9f39-f5336c679767 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.485345] env[62627]: DEBUG oslo_vmware.rw_handles [None req-802f837f-ac55-4bd1-9a7d-149642b4d044 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 874.485345] env[62627]: value = "vm-447658" [ 874.485345] env[62627]: _type = "VirtualMachine" [ 874.485345] env[62627]: }. 
{{(pid=62627) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 874.486599] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-484ec30d-5ffb-4e74-be54-7f22cb8d5d55 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.490703] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-c0eff3c4-3252-4166-bbdf-2b0f2a4be426 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.506896] env[62627]: DEBUG nova.compute.provider_tree [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 874.509707] env[62627]: DEBUG oslo_vmware.rw_handles [None req-802f837f-ac55-4bd1-9a7d-149642b4d044 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Lease: (returnval){ [ 874.509707] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52780d62-b4d1-2088-c790-6f6601208498" [ 874.509707] env[62627]: _type = "HttpNfcLease" [ 874.509707] env[62627]: } obtained for exporting VM: (result){ [ 874.509707] env[62627]: value = "vm-447658" [ 874.509707] env[62627]: _type = "VirtualMachine" [ 874.509707] env[62627]: }. {{(pid=62627) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 874.509917] env[62627]: DEBUG oslo_vmware.api [None req-802f837f-ac55-4bd1-9a7d-149642b4d044 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Waiting for the lease: (returnval){ [ 874.509917] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52780d62-b4d1-2088-c790-6f6601208498" [ 874.509917] env[62627]: _type = "HttpNfcLease" [ 874.509917] env[62627]: } to be ready. {{(pid=62627) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 874.517458] env[62627]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 874.517458] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52780d62-b4d1-2088-c790-6f6601208498" [ 874.517458] env[62627]: _type = "HttpNfcLease" [ 874.517458] env[62627]: } is initializing. {{(pid=62627) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 874.719732] env[62627]: DEBUG nova.compute.manager [req-0655e994-46d4-4edb-89d9-e2956eca5e7d req-d6c7ca91-1d55-4d87-9bc5-5fc401ec5066 service nova] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] Received event network-changed-bf72690a-0358-469d-8d21-85ef459cbcb9 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 874.719972] env[62627]: DEBUG nova.compute.manager [req-0655e994-46d4-4edb-89d9-e2956eca5e7d req-d6c7ca91-1d55-4d87-9bc5-5fc401ec5066 service nova] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] Refreshing instance network info cache due to event network-changed-bf72690a-0358-469d-8d21-85ef459cbcb9. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 874.720244] env[62627]: DEBUG oslo_concurrency.lockutils [req-0655e994-46d4-4edb-89d9-e2956eca5e7d req-d6c7ca91-1d55-4d87-9bc5-5fc401ec5066 service nova] Acquiring lock "refresh_cache-d28285ed-2ec0-43a4-bd34-82d33816fea4" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 874.720480] env[62627]: DEBUG oslo_concurrency.lockutils [req-0655e994-46d4-4edb-89d9-e2956eca5e7d req-d6c7ca91-1d55-4d87-9bc5-5fc401ec5066 service nova] Acquired lock "refresh_cache-d28285ed-2ec0-43a4-bd34-82d33816fea4" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 874.720740] env[62627]: DEBUG nova.network.neutron [req-0655e994-46d4-4edb-89d9-e2956eca5e7d req-d6c7ca91-1d55-4d87-9bc5-5fc401ec5066 service nova] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] Refreshing network info cache for port bf72690a-0358-469d-8d21-85ef459cbcb9 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 874.775478] env[62627]: DEBUG nova.compute.manager [req-3d1693da-189d-4c73-b5da-7d115ef618b6 req-7ba25259-74f4-450b-801b-0e8088c799a0 service nova] [instance: 5f0240da-bec1-4bf3-a975-6ed3d0630a2d] Received event network-vif-deleted-2807cd3b-e852-4c5c-bd4c-409c4d5c7e2f {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 874.775667] env[62627]: DEBUG nova.compute.manager [req-3d1693da-189d-4c73-b5da-7d115ef618b6 req-7ba25259-74f4-450b-801b-0e8088c799a0 service nova] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Received event network-changed-5db92929-a96d-41c2-be2b-95c9ef62c4bf {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 874.775852] env[62627]: DEBUG nova.compute.manager [req-3d1693da-189d-4c73-b5da-7d115ef618b6 req-7ba25259-74f4-450b-801b-0e8088c799a0 service nova] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Refreshing instance network info cache due to event network-changed-5db92929-a96d-41c2-be2b-95c9ef62c4bf. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 874.775988] env[62627]: DEBUG oslo_concurrency.lockutils [req-3d1693da-189d-4c73-b5da-7d115ef618b6 req-7ba25259-74f4-450b-801b-0e8088c799a0 service nova] Acquiring lock "refresh_cache-faa16f10-63f0-457c-8cd7-20a624454d86" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 874.875241] env[62627]: INFO nova.compute.manager [-] [instance: 5f0240da-bec1-4bf3-a975-6ed3d0630a2d] Took 1.92 seconds to deallocate network for instance. [ 874.889993] env[62627]: DEBUG nova.compute.manager [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] [instance: 7804914c-7df8-45c2-a934-67aa73afb2e2] Starting instance... 
{{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 874.897522] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c6ae5c6a-8ece-482b-bb89-2f6bb3f66ec8 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Acquiring lock "7691c2e2-4e43-4990-bf4d-2ef3298a4264" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 874.897947] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c6ae5c6a-8ece-482b-bb89-2f6bb3f66ec8 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Lock "7691c2e2-4e43-4990-bf4d-2ef3298a4264" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 874.898217] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c6ae5c6a-8ece-482b-bb89-2f6bb3f66ec8 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Acquiring lock "7691c2e2-4e43-4990-bf4d-2ef3298a4264-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 874.898452] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c6ae5c6a-8ece-482b-bb89-2f6bb3f66ec8 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Lock "7691c2e2-4e43-4990-bf4d-2ef3298a4264-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 874.898723] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c6ae5c6a-8ece-482b-bb89-2f6bb3f66ec8 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Lock "7691c2e2-4e43-4990-bf4d-2ef3298a4264-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 874.902113] env[62627]: INFO nova.compute.manager [None req-c6ae5c6a-8ece-482b-bb89-2f6bb3f66ec8 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] [instance: 7691c2e2-4e43-4990-bf4d-2ef3298a4264] Terminating instance [ 874.912035] env[62627]: DEBUG oslo_vmware.api [None req-1cc79297-9247-48f6-a865-8013b5aef85c tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2194367, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.936022] env[62627]: DEBUG oslo_vmware.api [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194368, 'name': Rename_Task, 'duration_secs': 0.382354} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.936266] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 874.936584] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-44c303cc-6918-48ca-8233-0b99e0e14e86 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.949124] env[62627]: DEBUG oslo_vmware.api [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Waiting for the task: (returnval){ [ 874.949124] env[62627]: value = "task-2194370" [ 874.949124] env[62627]: _type = "Task" [ 874.949124] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.961323] env[62627]: DEBUG oslo_vmware.api [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194370, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.978672] env[62627]: INFO nova.compute.manager [None req-915706f7-2ffa-4080-b14c-12ff2136b601 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] Rescuing [ 874.978994] env[62627]: DEBUG oslo_concurrency.lockutils [None req-915706f7-2ffa-4080-b14c-12ff2136b601 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Acquiring lock "refresh_cache-1129349d-65db-4e1d-bb08-cea793adf996" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 874.979200] env[62627]: DEBUG oslo_concurrency.lockutils [None req-915706f7-2ffa-4080-b14c-12ff2136b601 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Acquired lock "refresh_cache-1129349d-65db-4e1d-bb08-cea793adf996" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 874.979438] env[62627]: DEBUG nova.network.neutron [None req-915706f7-2ffa-4080-b14c-12ff2136b601 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 875.011916] env[62627]: DEBUG nova.scheduler.client.report [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 
'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 875.016640] env[62627]: DEBUG nova.network.neutron [None req-21a35e0b-f9d4-4a7b-9e00-133a788fa523 tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Updating instance_info_cache with network_info: [{"id": "5db92929-a96d-41c2-be2b-95c9ef62c4bf", "address": "fa:16:3e:c2:49:86", "network": {"id": "c9ffbe6a-63fb-4479-bc68-6209e534cbe0", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-213656681-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.182", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9d36f0395b094ef4aa913b538a31f6a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d33839ae-40ca-471b-92e3-eb282b920682", "external-id": "nsx-vlan-transportzone-416", "segmentation_id": 416, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5db92929-a9", "ovs_interfaceid": "5db92929-a96d-41c2-be2b-95c9ef62c4bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 875.026695] env[62627]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 875.026695] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52780d62-b4d1-2088-c790-6f6601208498" [ 875.026695] env[62627]: _type = "HttpNfcLease" [ 875.026695] env[62627]: } is ready. {{(pid=62627) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 875.027469] env[62627]: DEBUG oslo_vmware.rw_handles [None req-802f837f-ac55-4bd1-9a7d-149642b4d044 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 875.027469] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52780d62-b4d1-2088-c790-6f6601208498" [ 875.027469] env[62627]: _type = "HttpNfcLease" [ 875.027469] env[62627]: }. {{(pid=62627) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 875.027732] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd04ddf2-87d6-42d4-b3ae-1bb654846177 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.036555] env[62627]: DEBUG oslo_vmware.rw_handles [None req-802f837f-ac55-4bd1-9a7d-149642b4d044 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52c70b21-ec9c-2fcd-e5ea-62154534973d/disk-0.vmdk from lease info. 
{{(pid=62627) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 875.036779] env[62627]: DEBUG oslo_vmware.rw_handles [None req-802f837f-ac55-4bd1-9a7d-149642b4d044 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52c70b21-ec9c-2fcd-e5ea-62154534973d/disk-0.vmdk for reading. {{(pid=62627) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 875.203459] env[62627]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-bb760d0e-cae5-4f60-acd5-eac594b46929 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.381600] env[62627]: DEBUG oslo_concurrency.lockutils [None req-67ae8d3c-a51a-44e1-87e6-08c82ce6c4f0 tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 875.409372] env[62627]: DEBUG oslo_vmware.api [None req-1cc79297-9247-48f6-a865-8013b5aef85c tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2194367, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.416214] env[62627]: DEBUG nova.compute.manager [None req-c6ae5c6a-8ece-482b-bb89-2f6bb3f66ec8 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] [instance: 7691c2e2-4e43-4990-bf4d-2ef3298a4264] Start destroying the instance on the hypervisor. 
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 875.416754] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c6ae5c6a-8ece-482b-bb89-2f6bb3f66ec8 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] [instance: 7691c2e2-4e43-4990-bf4d-2ef3298a4264] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 875.417636] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a1c6306-5670-4657-a965-edd6f698b5dc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.424372] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 875.434732] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6ae5c6a-8ece-482b-bb89-2f6bb3f66ec8 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] [instance: 7691c2e2-4e43-4990-bf4d-2ef3298a4264] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 875.434732] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0a8afbc7-aaf6-40f7-bd3b-626bdbeab84d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.443944] env[62627]: DEBUG oslo_vmware.api [None req-c6ae5c6a-8ece-482b-bb89-2f6bb3f66ec8 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Waiting for the task: (returnval){ [ 875.443944] env[62627]: value = "task-2194371" [ 875.443944] env[62627]: _type = "Task" [ 875.443944] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.466780] env[62627]: DEBUG oslo_vmware.api [None req-c6ae5c6a-8ece-482b-bb89-2f6bb3f66ec8 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Task: {'id': task-2194371, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.466780] env[62627]: DEBUG oslo_vmware.api [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194370, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.520519] env[62627]: DEBUG nova.objects.instance [None req-180fbcdf-5aef-4b23-82a8-3df0fcb40b1f tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Lazy-loading 'flavor' on Instance uuid faa16f10-63f0-457c-8cd7-20a624454d86 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 875.524584] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.605s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 875.525304] env[62627]: DEBUG nova.compute.manager [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 875.532023] env[62627]: DEBUG oslo_concurrency.lockutils [None req-21a35e0b-f9d4-4a7b-9e00-133a788fa523 tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Releasing lock "refresh_cache-faa16f10-63f0-457c-8cd7-20a624454d86" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 875.532289] env[62627]: DEBUG nova.compute.manager [None req-21a35e0b-f9d4-4a7b-9e00-133a788fa523 tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Inject network info {{(pid=62627) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 875.532547] env[62627]: DEBUG nova.compute.manager [None req-21a35e0b-f9d4-4a7b-9e00-133a788fa523 tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] network_info to inject: |[{"id": "5db92929-a96d-41c2-be2b-95c9ef62c4bf", "address": "fa:16:3e:c2:49:86", "network": {"id": "c9ffbe6a-63fb-4479-bc68-6209e534cbe0", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-213656681-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.182", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9d36f0395b094ef4aa913b538a31f6a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d33839ae-40ca-471b-92e3-eb282b920682", "external-id": "nsx-vlan-transportzone-416", "segmentation_id": 416, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5db92929-a9", "ovs_interfaceid": "5db92929-a96d-41c2-be2b-95c9ef62c4bf", "qbh_params": null, 
"qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 875.538626] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-21a35e0b-f9d4-4a7b-9e00-133a788fa523 tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Reconfiguring VM instance to set the machine id {{(pid=62627) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 875.538933] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d5474d12-367e-4429-9f89-592f2e3a2db8 tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.275s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 875.539200] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d5474d12-367e-4429-9f89-592f2e3a2db8 tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 875.541526] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.701s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 875.542792] env[62627]: INFO nova.compute.claims [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 875.547299] env[62627]: DEBUG oslo_concurrency.lockutils [req-3d1693da-189d-4c73-b5da-7d115ef618b6 req-7ba25259-74f4-450b-801b-0e8088c799a0 service nova] Acquired lock "refresh_cache-faa16f10-63f0-457c-8cd7-20a624454d86" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 875.547299] env[62627]: DEBUG nova.network.neutron [req-3d1693da-189d-4c73-b5da-7d115ef618b6 req-7ba25259-74f4-450b-801b-0e8088c799a0 service nova] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Refreshing network info cache for port 5db92929-a96d-41c2-be2b-95c9ef62c4bf {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 875.547299] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4596a34f-bb81-4614-b164-52c016991833 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.577287] env[62627]: DEBUG oslo_vmware.api [None req-21a35e0b-f9d4-4a7b-9e00-133a788fa523 tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Waiting for the task: (returnval){ [ 875.577287] env[62627]: value = "task-2194372" [ 875.577287] env[62627]: _type = "Task" [ 875.577287] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.589531] env[62627]: DEBUG oslo_vmware.api [None req-21a35e0b-f9d4-4a7b-9e00-133a788fa523 tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Task: {'id': task-2194372, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.595872] env[62627]: INFO nova.scheduler.client.report [None req-d5474d12-367e-4429-9f89-592f2e3a2db8 tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Deleted allocations for instance cbd30c37-f6c3-4229-8b95-855cab64924c [ 875.739303] env[62627]: DEBUG nova.network.neutron [req-0655e994-46d4-4edb-89d9-e2956eca5e7d req-d6c7ca91-1d55-4d87-9bc5-5fc401ec5066 service nova] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] Updated VIF entry in instance network info cache for port bf72690a-0358-469d-8d21-85ef459cbcb9. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 875.739678] env[62627]: DEBUG nova.network.neutron [req-0655e994-46d4-4edb-89d9-e2956eca5e7d req-d6c7ca91-1d55-4d87-9bc5-5fc401ec5066 service nova] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] Updating instance_info_cache with network_info: [{"id": "bf72690a-0358-469d-8d21-85ef459cbcb9", "address": "fa:16:3e:16:7f:81", "network": {"id": "d75e7b13-f4ab-4626-9799-4970d691f831", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-781733128-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca351451d2344fc1872f6faf89412354", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c5f8c7b-74a0-43f7-87b4-3c38dd26ac3e", "external-id": "nsx-vlan-transportzone-988", "segmentation_id": 988, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbf72690a-03", "ovs_interfaceid": "bf72690a-0358-469d-8d21-85ef459cbcb9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 875.748622] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f8acef66-d09b-4cf5-a130-55c74245bf6a tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Acquiring lock "d28285ed-2ec0-43a4-bd34-82d33816fea4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 875.748872] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f8acef66-d09b-4cf5-a130-55c74245bf6a tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Lock "d28285ed-2ec0-43a4-bd34-82d33816fea4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 875.750991] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f8acef66-d09b-4cf5-a130-55c74245bf6a tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Acquiring lock "d28285ed-2ec0-43a4-bd34-82d33816fea4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 875.751573] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f8acef66-d09b-4cf5-a130-55c74245bf6a tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Lock "d28285ed-2ec0-43a4-bd34-82d33816fea4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 875.751864] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f8acef66-d09b-4cf5-a130-55c74245bf6a tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Lock "d28285ed-2ec0-43a4-bd34-82d33816fea4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 875.757135] env[62627]: INFO nova.compute.manager [None req-f8acef66-d09b-4cf5-a130-55c74245bf6a tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] Terminating instance [ 875.918579] env[62627]: DEBUG oslo_vmware.api [None req-1cc79297-9247-48f6-a865-8013b5aef85c tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2194367, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.953749] env[62627]: DEBUG oslo_vmware.api [None req-c6ae5c6a-8ece-482b-bb89-2f6bb3f66ec8 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Task: {'id': task-2194371, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.964946] env[62627]: DEBUG oslo_vmware.api [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194370, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.042832] env[62627]: DEBUG nova.compute.utils [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 876.045589] env[62627]: DEBUG oslo_concurrency.lockutils [None req-180fbcdf-5aef-4b23-82a8-3df0fcb40b1f tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Acquiring lock "refresh_cache-faa16f10-63f0-457c-8cd7-20a624454d86" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 876.045589] env[62627]: DEBUG nova.compute.manager [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] Allocating IP information in the background. {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 876.045589] env[62627]: DEBUG nova.network.neutron [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 876.089770] env[62627]: DEBUG oslo_vmware.api [None req-21a35e0b-f9d4-4a7b-9e00-133a788fa523 tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Task: {'id': task-2194372, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.107388] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d5474d12-367e-4429-9f89-592f2e3a2db8 tempest-DeleteServersAdminTestJSON-1615940801 tempest-DeleteServersAdminTestJSON-1615940801-project-member] Lock "cbd30c37-f6c3-4229-8b95-855cab64924c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.299s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 876.243275] env[62627]: DEBUG oslo_concurrency.lockutils [req-0655e994-46d4-4edb-89d9-e2956eca5e7d req-d6c7ca91-1d55-4d87-9bc5-5fc401ec5066 service nova] Releasing lock "refresh_cache-d28285ed-2ec0-43a4-bd34-82d33816fea4" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 876.264984] env[62627]: DEBUG nova.compute.manager [None req-f8acef66-d09b-4cf5-a130-55c74245bf6a tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] Start destroying the instance on the hypervisor. 
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 876.265359] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-f8acef66-d09b-4cf5-a130-55c74245bf6a tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 876.266552] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-724a4fa4-4ffd-41e7-bd9e-bdf7dc8c1c59 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.276832] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8acef66-d09b-4cf5-a130-55c74245bf6a tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 876.276832] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d01c3521-f41c-482f-8ed4-f6c251ee587c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.284708] env[62627]: DEBUG oslo_vmware.api [None req-f8acef66-d09b-4cf5-a130-55c74245bf6a tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Waiting for the task: (returnval){ [ 876.284708] env[62627]: value = "task-2194373" [ 876.284708] env[62627]: _type = "Task" [ 876.284708] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.293803] env[62627]: DEBUG oslo_vmware.api [None req-f8acef66-d09b-4cf5-a130-55c74245bf6a tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Task: {'id': task-2194373, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.405153] env[62627]: DEBUG oslo_vmware.api [None req-1cc79297-9247-48f6-a865-8013b5aef85c tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2194367, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.462493] env[62627]: DEBUG oslo_vmware.api [None req-c6ae5c6a-8ece-482b-bb89-2f6bb3f66ec8 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Task: {'id': task-2194371, 'name': PowerOffVM_Task, 'duration_secs': 0.550179} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.464267] env[62627]: DEBUG nova.network.neutron [None req-915706f7-2ffa-4080-b14c-12ff2136b601 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] Updating instance_info_cache with network_info: [{"id": "f91eb28c-7881-4e2c-b51e-7c0f169652d1", "address": "fa:16:3e:d0:b1:d8", "network": {"id": "6868e7f7-6e86-4ef3-a3f2-82b652203315", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-729196523-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "6ad7b328aba44b51aec9455bada6678c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf91eb28c-78", "ovs_interfaceid": "f91eb28c-7881-4e2c-b51e-7c0f169652d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 876.471562] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6ae5c6a-8ece-482b-bb89-2f6bb3f66ec8 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] [instance: 7691c2e2-4e43-4990-bf4d-2ef3298a4264] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 876.471939] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c6ae5c6a-8ece-482b-bb89-2f6bb3f66ec8 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] [instance: 7691c2e2-4e43-4990-bf4d-2ef3298a4264] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 876.475240] env[62627]: DEBUG nova.policy [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ade098da576147a6bff6685cb886ff78', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7a97f57bdf4a46db80460b1d5c9f9b9f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 876.477858] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dd2be1d6-fa34-4542-865f-3865b1cd9ad6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.487202] env[62627]: DEBUG oslo_vmware.api [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: 
{'id': task-2194370, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.549337] env[62627]: DEBUG nova.compute.manager [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 876.578593] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c6ae5c6a-8ece-482b-bb89-2f6bb3f66ec8 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] [instance: 7691c2e2-4e43-4990-bf4d-2ef3298a4264] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 876.578959] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c6ae5c6a-8ece-482b-bb89-2f6bb3f66ec8 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] [instance: 7691c2e2-4e43-4990-bf4d-2ef3298a4264] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 876.579288] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6ae5c6a-8ece-482b-bb89-2f6bb3f66ec8 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Deleting the datastore file [datastore2] 7691c2e2-4e43-4990-bf4d-2ef3298a4264 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 876.579782] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dd954c01-6d3a-4d4a-9779-4521b94cb579 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.593605] env[62627]: DEBUG oslo_vmware.api [None req-21a35e0b-f9d4-4a7b-9e00-133a788fa523 tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Task: {'id': task-2194372, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.598009] env[62627]: DEBUG oslo_vmware.api [None req-c6ae5c6a-8ece-482b-bb89-2f6bb3f66ec8 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Waiting for the task: (returnval){ [ 876.598009] env[62627]: value = "task-2194375" [ 876.598009] env[62627]: _type = "Task" [ 876.598009] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.606276] env[62627]: DEBUG oslo_vmware.api [None req-c6ae5c6a-8ece-482b-bb89-2f6bb3f66ec8 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Task: {'id': task-2194375, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.722217] env[62627]: DEBUG nova.network.neutron [req-3d1693da-189d-4c73-b5da-7d115ef618b6 req-7ba25259-74f4-450b-801b-0e8088c799a0 service nova] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Updated VIF entry in instance network info cache for port 5db92929-a96d-41c2-be2b-95c9ef62c4bf. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 876.722643] env[62627]: DEBUG nova.network.neutron [req-3d1693da-189d-4c73-b5da-7d115ef618b6 req-7ba25259-74f4-450b-801b-0e8088c799a0 service nova] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Updating instance_info_cache with network_info: [{"id": "5db92929-a96d-41c2-be2b-95c9ef62c4bf", "address": "fa:16:3e:c2:49:86", "network": {"id": "c9ffbe6a-63fb-4479-bc68-6209e534cbe0", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-213656681-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.182", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9d36f0395b094ef4aa913b538a31f6a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d33839ae-40ca-471b-92e3-eb282b920682", "external-id": "nsx-vlan-transportzone-416", "segmentation_id": 416, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5db92929-a9", "ovs_interfaceid": "5db92929-a96d-41c2-be2b-95c9ef62c4bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 876.796803] env[62627]: DEBUG oslo_vmware.api [None req-f8acef66-d09b-4cf5-a130-55c74245bf6a tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Task: {'id': task-2194373, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.909751] env[62627]: DEBUG oslo_vmware.api [None req-1cc79297-9247-48f6-a865-8013b5aef85c tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2194367, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.976984] env[62627]: DEBUG oslo_concurrency.lockutils [None req-915706f7-2ffa-4080-b14c-12ff2136b601 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Releasing lock "refresh_cache-1129349d-65db-4e1d-bb08-cea793adf996" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 876.977195] env[62627]: DEBUG oslo_vmware.api [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194370, 'name': PowerOnVM_Task, 'duration_secs': 1.707803} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.983331] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 876.983331] env[62627]: INFO nova.compute.manager [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8] Took 11.29 seconds to spawn the instance on the hypervisor. [ 876.983331] env[62627]: DEBUG nova.compute.manager [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 876.983331] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9271d64-5927-4bbc-a990-e8974807de77 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.095152] env[62627]: DEBUG nova.network.neutron [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] Successfully created port: b0e730ac-28a5-40f0-bbe1-c4f48f060624 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 877.105072] env[62627]: DEBUG oslo_vmware.api [None req-21a35e0b-f9d4-4a7b-9e00-133a788fa523 tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Task: {'id': task-2194372, 'name': ReconfigVM_Task, 'duration_secs': 1.197982} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.115493] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-21a35e0b-f9d4-4a7b-9e00-133a788fa523 tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Reconfigured VM instance to set the machine id {{(pid=62627) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 877.131933] env[62627]: DEBUG oslo_vmware.api [None req-c6ae5c6a-8ece-482b-bb89-2f6bb3f66ec8 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Task: {'id': task-2194375, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.262479} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.132275] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6ae5c6a-8ece-482b-bb89-2f6bb3f66ec8 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 877.132460] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c6ae5c6a-8ece-482b-bb89-2f6bb3f66ec8 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] [instance: 7691c2e2-4e43-4990-bf4d-2ef3298a4264] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 877.132630] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c6ae5c6a-8ece-482b-bb89-2f6bb3f66ec8 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] [instance: 7691c2e2-4e43-4990-bf4d-2ef3298a4264] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 877.132857] env[62627]: INFO nova.compute.manager [None req-c6ae5c6a-8ece-482b-bb89-2f6bb3f66ec8 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] [instance: 7691c2e2-4e43-4990-bf4d-2ef3298a4264] Took 1.72 seconds to destroy the instance on the hypervisor. [ 877.134838] env[62627]: DEBUG oslo.service.loopingcall [None req-c6ae5c6a-8ece-482b-bb89-2f6bb3f66ec8 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 877.135180] env[62627]: DEBUG nova.compute.manager [-] [instance: 7691c2e2-4e43-4990-bf4d-2ef3298a4264] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 877.135290] env[62627]: DEBUG nova.network.neutron [-] [instance: 7691c2e2-4e43-4990-bf4d-2ef3298a4264] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 877.230058] env[62627]: DEBUG oslo_concurrency.lockutils [req-3d1693da-189d-4c73-b5da-7d115ef618b6 req-7ba25259-74f4-450b-801b-0e8088c799a0 service nova] Releasing lock "refresh_cache-faa16f10-63f0-457c-8cd7-20a624454d86" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 877.230491] env[62627]: DEBUG oslo_concurrency.lockutils [None req-180fbcdf-5aef-4b23-82a8-3df0fcb40b1f tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Acquired lock "refresh_cache-faa16f10-63f0-457c-8cd7-20a624454d86" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 877.255939] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12800761-8aec-493c-96b7-8a53b4e6cca7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.265974] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2ee5dce-c932-441b-9568-20458a45e010 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.319600] env[62627]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0162501-5749-400b-8ba5-4d37a619bebd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.336454] env[62627]: DEBUG oslo_vmware.api [None req-f8acef66-d09b-4cf5-a130-55c74245bf6a tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Task: {'id': task-2194373, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.340326] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8313487-1d2d-49f5-87b6-bcb347e0c02c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.358948] env[62627]: DEBUG nova.compute.provider_tree [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 877.407432] env[62627]: DEBUG oslo_vmware.api [None req-1cc79297-9247-48f6-a865-8013b5aef85c tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2194367, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.513571] env[62627]: INFO nova.compute.manager [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8] Took 50.34 seconds to build instance. [ 877.565180] env[62627]: DEBUG nova.compute.manager [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] Start spawning the instance on the hypervisor. 
{{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 877.602864] env[62627]: DEBUG nova.virt.hardware [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 877.603234] env[62627]: DEBUG nova.virt.hardware [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 877.603472] env[62627]: DEBUG nova.virt.hardware [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 877.603712] env[62627]: DEBUG nova.virt.hardware [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 877.603953] env[62627]: DEBUG nova.virt.hardware [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 877.604324] env[62627]: DEBUG nova.virt.hardware [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 877.604642] env[62627]: DEBUG nova.virt.hardware [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 877.604912] env[62627]: DEBUG nova.virt.hardware [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 877.605293] env[62627]: DEBUG 
nova.virt.hardware [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 877.605590] env[62627]: DEBUG nova.virt.hardware [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 877.605824] env[62627]: DEBUG nova.virt.hardware [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 877.607309] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d5f5895-c298-4186-98f7-526c311d04ba {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.618424] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1fd7f70-565e-41b6-a843-61901764ebd8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.832499] env[62627]: DEBUG oslo_vmware.api [None req-f8acef66-d09b-4cf5-a130-55c74245bf6a tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Task: {'id': task-2194373, 'name': PowerOffVM_Task, 'duration_secs': 1.306467} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.832499] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8acef66-d09b-4cf5-a130-55c74245bf6a tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 877.832499] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-f8acef66-d09b-4cf5-a130-55c74245bf6a tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 877.832499] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f1979f69-73f2-4b0b-83d2-4d3cb16650b5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.863322] env[62627]: DEBUG nova.scheduler.client.report [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 877.907418] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-f8acef66-d09b-4cf5-a130-55c74245bf6a tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 877.908315] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-f8acef66-d09b-4cf5-a130-55c74245bf6a tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] Deleting contents of the VM from datastore datastore1 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 877.909030] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8acef66-d09b-4cf5-a130-55c74245bf6a tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Deleting the datastore file [datastore1] d28285ed-2ec0-43a4-bd34-82d33816fea4 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 877.909906] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8075e61f-d6aa-446b-8f99-11c162f05365 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.918285] env[62627]: DEBUG oslo_vmware.api [None req-1cc79297-9247-48f6-a865-8013b5aef85c tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2194367, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.926232] env[62627]: DEBUG oslo_vmware.api [None req-f8acef66-d09b-4cf5-a130-55c74245bf6a tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Waiting for the task: (returnval){ [ 877.926232] env[62627]: value = "task-2194377" [ 877.926232] env[62627]: _type = "Task" [ 877.926232] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.937555] env[62627]: DEBUG oslo_vmware.api [None req-f8acef66-d09b-4cf5-a130-55c74245bf6a tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Task: {'id': task-2194377, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.016332] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e07ad398-ec59-4e97-90ad-096feeaf607f tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Lock "4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 105.434s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 878.372553] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.829s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 878.372553] env[62627]: DEBUG nova.compute.manager [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 878.374200] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.438s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 878.376736] env[62627]: INFO nova.compute.claims [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 02216be1-cdb9-421a-8a23-4a919b376595] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 878.412892] env[62627]: DEBUG oslo_vmware.api [None req-1cc79297-9247-48f6-a865-8013b5aef85c tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2194367, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.434456] env[62627]: DEBUG nova.network.neutron [None req-180fbcdf-5aef-4b23-82a8-3df0fcb40b1f tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 878.439507] env[62627]: DEBUG oslo_vmware.api [None req-f8acef66-d09b-4cf5-a130-55c74245bf6a tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Task: {'id': task-2194377, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.427117} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.439752] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8acef66-d09b-4cf5-a130-55c74245bf6a tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 878.440196] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-f8acef66-d09b-4cf5-a130-55c74245bf6a tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] Deleted contents of the VM from datastore datastore1 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 878.440196] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-f8acef66-d09b-4cf5-a130-55c74245bf6a tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 878.440354] env[62627]: INFO nova.compute.manager [None req-f8acef66-d09b-4cf5-a130-55c74245bf6a tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] Took 2.18 seconds to destroy the instance on the hypervisor. [ 878.440785] env[62627]: DEBUG oslo.service.loopingcall [None req-f8acef66-d09b-4cf5-a130-55c74245bf6a tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 878.441110] env[62627]: DEBUG nova.compute.manager [-] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 878.441224] env[62627]: DEBUG nova.network.neutron [-] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 878.518505] env[62627]: DEBUG nova.compute.manager [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] Starting instance... 
{{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 878.525709] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-915706f7-2ffa-4080-b14c-12ff2136b601 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 878.525709] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f04e14d0-0e14-4f3e-bfa5-9e03258b2214 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.533460] env[62627]: DEBUG oslo_vmware.api [None req-915706f7-2ffa-4080-b14c-12ff2136b601 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Waiting for the task: (returnval){ [ 878.533460] env[62627]: value = "task-2194378" [ 878.533460] env[62627]: _type = "Task" [ 878.533460] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.545999] env[62627]: DEBUG oslo_vmware.api [None req-915706f7-2ffa-4080-b14c-12ff2136b601 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194378, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.805957] env[62627]: DEBUG nova.compute.manager [req-c47a41f1-973f-45b0-a4d5-21c84c374ffd req-3146cf88-26a2-4f11-bc7b-937531e72e94 service nova] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Received event network-changed-5db92929-a96d-41c2-be2b-95c9ef62c4bf {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 878.806228] env[62627]: DEBUG nova.compute.manager [req-c47a41f1-973f-45b0-a4d5-21c84c374ffd req-3146cf88-26a2-4f11-bc7b-937531e72e94 service nova] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Refreshing instance network info cache due to event network-changed-5db92929-a96d-41c2-be2b-95c9ef62c4bf. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 878.806824] env[62627]: DEBUG oslo_concurrency.lockutils [req-c47a41f1-973f-45b0-a4d5-21c84c374ffd req-3146cf88-26a2-4f11-bc7b-937531e72e94 service nova] Acquiring lock "refresh_cache-faa16f10-63f0-457c-8cd7-20a624454d86" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 878.882148] env[62627]: DEBUG nova.compute.utils [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 878.888353] env[62627]: DEBUG nova.compute.manager [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Allocating IP information in the background. 
{{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 878.888609] env[62627]: DEBUG nova.network.neutron [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 878.923717] env[62627]: DEBUG oslo_vmware.api [None req-1cc79297-9247-48f6-a865-8013b5aef85c tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2194367, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.998055] env[62627]: DEBUG nova.network.neutron [-] [instance: 7691c2e2-4e43-4990-bf4d-2ef3298a4264] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 879.007613] env[62627]: DEBUG nova.policy [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '719a4b973b0644d49796851afe6055c5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ecf9c58481c14634b2315248118ed1b6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 879.046751] env[62627]: DEBUG oslo_vmware.api [None req-915706f7-2ffa-4080-b14c-12ff2136b601 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194378, 'name': PowerOffVM_Task, 'duration_secs': 0.240067} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.047053] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-915706f7-2ffa-4080-b14c-12ff2136b601 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 879.047974] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2aa5eb1-f5dc-46c0-ae07-63472c3f01c6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.052678] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 879.071087] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1caf1f75-70c0-4692-89cc-439050038072 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.108552] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-915706f7-2ffa-4080-b14c-12ff2136b601 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 879.108889] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-30037dc6-df2d-43e6-b54a-54e39fadd39b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.119251] env[62627]: DEBUG oslo_vmware.api [None req-915706f7-2ffa-4080-b14c-12ff2136b601 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Waiting for the task: (returnval){ [ 879.119251] env[62627]: value = "task-2194379" [ 879.119251] env[62627]: _type = "Task" [ 879.119251] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.128213] env[62627]: DEBUG oslo_vmware.api [None req-915706f7-2ffa-4080-b14c-12ff2136b601 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194379, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.389804] env[62627]: DEBUG nova.compute.manager [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Start building block device mappings for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 879.419502] env[62627]: DEBUG nova.network.neutron [-] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 879.420920] env[62627]: DEBUG oslo_vmware.api [None req-1cc79297-9247-48f6-a865-8013b5aef85c tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2194367, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.504308] env[62627]: INFO nova.compute.manager [-] [instance: 7691c2e2-4e43-4990-bf4d-2ef3298a4264] Took 2.37 seconds to deallocate network for instance. [ 879.636022] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-915706f7-2ffa-4080-b14c-12ff2136b601 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] VM already powered off {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 879.636022] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-915706f7-2ffa-4080-b14c-12ff2136b601 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 879.636022] env[62627]: DEBUG oslo_concurrency.lockutils [None req-915706f7-2ffa-4080-b14c-12ff2136b601 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 879.636022] env[62627]: DEBUG oslo_concurrency.lockutils [None req-915706f7-2ffa-4080-b14c-12ff2136b601 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 879.636022] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-915706f7-2ffa-4080-b14c-12ff2136b601 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 879.636022] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8c1281e6-3f41-4e36-97cb-a487c1a0bfa0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.649695] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-915706f7-2ffa-4080-b14c-12ff2136b601 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 879.652413] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-915706f7-2ffa-4080-b14c-12ff2136b601 
tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 879.652413] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1fc66786-62a8-459f-9443-0622521d30e8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.682629] env[62627]: DEBUG oslo_vmware.api [None req-915706f7-2ffa-4080-b14c-12ff2136b601 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Waiting for the task: (returnval){ [ 879.682629] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]525d965f-5ccc-4db2-d62e-6840a1b52bda" [ 879.682629] env[62627]: _type = "Task" [ 879.682629] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.687374] env[62627]: DEBUG nova.network.neutron [None req-180fbcdf-5aef-4b23-82a8-3df0fcb40b1f tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Updating instance_info_cache with network_info: [{"id": "5db92929-a96d-41c2-be2b-95c9ef62c4bf", "address": "fa:16:3e:c2:49:86", "network": {"id": "c9ffbe6a-63fb-4479-bc68-6209e534cbe0", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-213656681-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.182", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9d36f0395b094ef4aa913b538a31f6a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d33839ae-40ca-471b-92e3-eb282b920682", "external-id": "nsx-vlan-transportzone-416", "segmentation_id": 416, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5db92929-a9", "ovs_interfaceid": "5db92929-a96d-41c2-be2b-95c9ef62c4bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 879.713020] env[62627]: DEBUG oslo_vmware.api [None req-915706f7-2ffa-4080-b14c-12ff2136b601 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]525d965f-5ccc-4db2-d62e-6840a1b52bda, 'name': SearchDatastore_Task, 'duration_secs': 0.015258} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.716982] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b514fd55-ea79-4f8f-8e50-442a40ef2980 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.721409] env[62627]: DEBUG oslo_concurrency.lockutils [None req-649b49bf-f015-4632-be3e-35fc5bfe58c9 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Acquiring lock "a397bce6-30c7-485d-81ab-88a8e021996c" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 879.723400] env[62627]: DEBUG oslo_concurrency.lockutils [None req-649b49bf-f015-4632-be3e-35fc5bfe58c9 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Lock "a397bce6-30c7-485d-81ab-88a8e021996c" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 879.730906] env[62627]: DEBUG oslo_vmware.api [None req-915706f7-2ffa-4080-b14c-12ff2136b601 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Waiting for the task: (returnval){ [ 879.730906] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]528673c5-787e-e12d-b658-670af0e58032" [ 879.730906] env[62627]: _type = "Task" [ 879.730906] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.742080] env[62627]: DEBUG oslo_vmware.api [None req-915706f7-2ffa-4080-b14c-12ff2136b601 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]528673c5-787e-e12d-b658-670af0e58032, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.922143] env[62627]: DEBUG oslo_vmware.api [None req-1cc79297-9247-48f6-a865-8013b5aef85c tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2194367, 'name': ReconfigVM_Task, 'duration_secs': 5.863647} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.922143] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1cc79297-9247-48f6-a865-8013b5aef85c tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Releasing lock "a12855e2-1544-4e85-a7ff-c026fec861b6" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 879.922143] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-1cc79297-9247-48f6-a865-8013b5aef85c tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Reconfigured VM to detach interface {{(pid=62627) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 879.924798] env[62627]: INFO nova.compute.manager [-] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] Took 1.48 seconds to deallocate network for instance. 
[ 880.023055] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c6ae5c6a-8ece-482b-bb89-2f6bb3f66ec8 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 880.037564] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ac5715f-a945-4911-8e8c-a13cbd7b2d87 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.045771] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a18dcd7f-4954-419f-8e7c-c83405ab6cd9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.084955] env[62627]: DEBUG nova.network.neutron [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Successfully created port: 6bf6cde4-6fe7-4f05-a8ed-b57e621b565b {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 880.090152] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53ce3dd6-d355-4836-8d38-bdebb082f25d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.096418] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9de73a2-d2f0-4535-ac31-77e77846c9d2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.112785] env[62627]: DEBUG nova.compute.provider_tree [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 880.199225] env[62627]: DEBUG oslo_concurrency.lockutils [None req-180fbcdf-5aef-4b23-82a8-3df0fcb40b1f tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Releasing lock "refresh_cache-faa16f10-63f0-457c-8cd7-20a624454d86" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 880.199516] env[62627]: DEBUG nova.compute.manager [None req-180fbcdf-5aef-4b23-82a8-3df0fcb40b1f tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Inject network info {{(pid=62627) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 880.199802] env[62627]: DEBUG nova.compute.manager [None req-180fbcdf-5aef-4b23-82a8-3df0fcb40b1f tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] network_info to inject: |[{"id": "5db92929-a96d-41c2-be2b-95c9ef62c4bf", "address": "fa:16:3e:c2:49:86", "network": {"id": "c9ffbe6a-63fb-4479-bc68-6209e534cbe0", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-213656681-network", "subnets": [{"cidr": 
"192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.182", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9d36f0395b094ef4aa913b538a31f6a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d33839ae-40ca-471b-92e3-eb282b920682", "external-id": "nsx-vlan-transportzone-416", "segmentation_id": 416, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5db92929-a9", "ovs_interfaceid": "5db92929-a96d-41c2-be2b-95c9ef62c4bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 880.204753] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-180fbcdf-5aef-4b23-82a8-3df0fcb40b1f tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Reconfiguring VM instance to set the machine id {{(pid=62627) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 880.205210] env[62627]: DEBUG oslo_concurrency.lockutils [req-c47a41f1-973f-45b0-a4d5-21c84c374ffd req-3146cf88-26a2-4f11-bc7b-937531e72e94 service nova] Acquired lock "refresh_cache-faa16f10-63f0-457c-8cd7-20a624454d86" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 880.205562] env[62627]: DEBUG nova.network.neutron [req-c47a41f1-973f-45b0-a4d5-21c84c374ffd req-3146cf88-26a2-4f11-bc7b-937531e72e94 service nova] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Refreshing network info cache for port 5db92929-a96d-41c2-be2b-95c9ef62c4bf {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 880.206759] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d8df651d-fe5b-44a3-8dc2-7d028408381c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.225713] env[62627]: DEBUG nova.compute.utils [None req-649b49bf-f015-4632-be3e-35fc5bfe58c9 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 880.228933] env[62627]: DEBUG oslo_vmware.api [None req-180fbcdf-5aef-4b23-82a8-3df0fcb40b1f tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Waiting for the task: (returnval){ [ 880.228933] env[62627]: value = "task-2194380" [ 880.228933] env[62627]: _type = "Task" [ 880.228933] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.247512] env[62627]: DEBUG oslo_vmware.api [None req-180fbcdf-5aef-4b23-82a8-3df0fcb40b1f tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Task: {'id': task-2194380, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.252952] env[62627]: DEBUG oslo_vmware.api [None req-915706f7-2ffa-4080-b14c-12ff2136b601 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]528673c5-787e-e12d-b658-670af0e58032, 'name': SearchDatastore_Task, 'duration_secs': 0.033726} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.253320] env[62627]: DEBUG oslo_concurrency.lockutils [None req-915706f7-2ffa-4080-b14c-12ff2136b601 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 880.253618] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-915706f7-2ffa-4080-b14c-12ff2136b601 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 1129349d-65db-4e1d-bb08-cea793adf996/d5497cf1-16e8-478a-a834-e9289b1751b7-rescue.vmdk. {{(pid=62627) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 880.253898] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b7200b0d-1d5a-4457-9e1b-cabcb7f07ae8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.262765] env[62627]: DEBUG oslo_vmware.api [None req-915706f7-2ffa-4080-b14c-12ff2136b601 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Waiting for the task: (returnval){ [ 880.262765] env[62627]: value = "task-2194381" [ 880.262765] env[62627]: _type = "Task" [ 880.262765] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.275125] env[62627]: DEBUG oslo_vmware.api [None req-915706f7-2ffa-4080-b14c-12ff2136b601 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194381, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.372407] env[62627]: DEBUG nova.network.neutron [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] Successfully updated port: b0e730ac-28a5-40f0-bbe1-c4f48f060624 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 880.409051] env[62627]: DEBUG nova.compute.manager [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Start spawning the instance on the hypervisor. 
{{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 880.435676] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f8acef66-d09b-4cf5-a130-55c74245bf6a tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 880.447243] env[62627]: DEBUG nova.virt.hardware [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 880.447495] env[62627]: DEBUG nova.virt.hardware [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 880.447655] env[62627]: DEBUG nova.virt.hardware [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 880.447836] env[62627]: DEBUG nova.virt.hardware [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 880.447982] env[62627]: DEBUG nova.virt.hardware [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 880.448176] env[62627]: DEBUG nova.virt.hardware [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 880.448455] env[62627]: DEBUG nova.virt.hardware [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 880.448668] 
env[62627]: DEBUG nova.virt.hardware [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 880.448868] env[62627]: DEBUG nova.virt.hardware [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 880.449051] env[62627]: DEBUG nova.virt.hardware [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 880.449229] env[62627]: DEBUG nova.virt.hardware [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 880.450264] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e7c5d97-82ab-408c-9f26-fb943907c151 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.460988] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7422fd5-56a3-4a9f-89c4-4caef40715c1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.617264] env[62627]: DEBUG nova.scheduler.client.report [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 880.730068] env[62627]: DEBUG oslo_concurrency.lockutils [None req-649b49bf-f015-4632-be3e-35fc5bfe58c9 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Lock "a397bce6-30c7-485d-81ab-88a8e021996c" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 880.743432] env[62627]: DEBUG oslo_vmware.api [None req-180fbcdf-5aef-4b23-82a8-3df0fcb40b1f tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Task: {'id': task-2194380, 'name': ReconfigVM_Task, 'duration_secs': 0.159139} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.743702] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-180fbcdf-5aef-4b23-82a8-3df0fcb40b1f tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Reconfigured VM instance to set the machine id {{(pid=62627) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 880.775962] env[62627]: DEBUG oslo_vmware.api [None req-915706f7-2ffa-4080-b14c-12ff2136b601 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194381, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.877480] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquiring lock "refresh_cache-6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 880.877480] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquired lock "refresh_cache-6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 880.877480] env[62627]: DEBUG nova.network.neutron [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 881.124542] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.750s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 881.126460] env[62627]: DEBUG nova.compute.manager [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 02216be1-cdb9-421a-8a23-4a919b376595] Start building networks asynchronously for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 881.129909] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a2b9dec8-2925-4028-8cda-86c25f2035c6 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.310s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 881.133476] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a2b9dec8-2925-4028-8cda-86c25f2035c6 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 881.133476] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.082s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 881.135086] env[62627]: INFO nova.compute.claims [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: f1b70379-e13d-4007-ac2d-3d2d43cd5602] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 881.176328] env[62627]: INFO nova.scheduler.client.report [None req-a2b9dec8-2925-4028-8cda-86c25f2035c6 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Deleted allocations for instance a674db15-ea4e-44c8-abc1-6af425286517 [ 881.235616] env[62627]: DEBUG nova.network.neutron [req-c47a41f1-973f-45b0-a4d5-21c84c374ffd req-3146cf88-26a2-4f11-bc7b-937531e72e94 service nova] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Updated VIF entry in instance network info cache for port 5db92929-a96d-41c2-be2b-95c9ef62c4bf. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 881.235616] env[62627]: DEBUG nova.network.neutron [req-c47a41f1-973f-45b0-a4d5-21c84c374ffd req-3146cf88-26a2-4f11-bc7b-937531e72e94 service nova] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Updating instance_info_cache with network_info: [{"id": "5db92929-a96d-41c2-be2b-95c9ef62c4bf", "address": "fa:16:3e:c2:49:86", "network": {"id": "c9ffbe6a-63fb-4479-bc68-6209e534cbe0", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-213656681-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.182", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9d36f0395b094ef4aa913b538a31f6a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d33839ae-40ca-471b-92e3-eb282b920682", "external-id": "nsx-vlan-transportzone-416", "segmentation_id": 416, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5db92929-a9", "ovs_interfaceid": "5db92929-a96d-41c2-be2b-95c9ef62c4bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 881.280953] env[62627]: DEBUG oslo_vmware.api [None req-915706f7-2ffa-4080-b14c-12ff2136b601 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194381, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.936444} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.281289] env[62627]: INFO nova.virt.vmwareapi.ds_util [None req-915706f7-2ffa-4080-b14c-12ff2136b601 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 1129349d-65db-4e1d-bb08-cea793adf996/d5497cf1-16e8-478a-a834-e9289b1751b7-rescue.vmdk. 
[ 881.282142] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aff30616-1b74-4814-9b16-9d4f15587725 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.315559] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-915706f7-2ffa-4080-b14c-12ff2136b601 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] Reconfiguring VM instance instance-00000027 to attach disk [datastore2] 1129349d-65db-4e1d-bb08-cea793adf996/d5497cf1-16e8-478a-a834-e9289b1751b7-rescue.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 881.317374] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-94ad9d34-69a6-4f65-9fa9-84cbfc391941 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.341943] env[62627]: DEBUG oslo_vmware.api [None req-915706f7-2ffa-4080-b14c-12ff2136b601 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Waiting for the task: (returnval){ [ 881.341943] env[62627]: value = "task-2194382" [ 881.341943] env[62627]: _type = "Task" [ 881.341943] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.351919] env[62627]: DEBUG oslo_vmware.api [None req-915706f7-2ffa-4080-b14c-12ff2136b601 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194382, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.353688] env[62627]: DEBUG nova.network.neutron [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Successfully created port: e9aeb5b9-eeec-406e-a7d0-6207531ec459 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 881.459797] env[62627]: DEBUG nova.network.neutron [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] Instance cache missing network info. 
{{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 881.463470] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c78c57c4-3f40-4788-a7ae-32d7a8a29f92 tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Acquiring lock "faa16f10-63f0-457c-8cd7-20a624454d86" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 881.463660] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c78c57c4-3f40-4788-a7ae-32d7a8a29f92 tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Lock "faa16f10-63f0-457c-8cd7-20a624454d86" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 881.463882] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c78c57c4-3f40-4788-a7ae-32d7a8a29f92 tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Acquiring lock "faa16f10-63f0-457c-8cd7-20a624454d86-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 881.464466] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c78c57c4-3f40-4788-a7ae-32d7a8a29f92 tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Lock "faa16f10-63f0-457c-8cd7-20a624454d86-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 881.465223] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c78c57c4-3f40-4788-a7ae-32d7a8a29f92 tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Lock "faa16f10-63f0-457c-8cd7-20a624454d86-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 881.466731] env[62627]: INFO nova.compute.manager [None req-c78c57c4-3f40-4788-a7ae-32d7a8a29f92 tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Terminating instance [ 881.632066] env[62627]: DEBUG nova.compute.utils [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 881.633737] env[62627]: DEBUG nova.compute.manager [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 02216be1-cdb9-421a-8a23-4a919b376595] Allocating IP information in the background. 
{{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 881.637376] env[62627]: DEBUG nova.network.neutron [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 02216be1-cdb9-421a-8a23-4a919b376595] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 881.689568] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a2b9dec8-2925-4028-8cda-86c25f2035c6 tempest-FloatingIPsAssociationTestJSON-1435038635 tempest-FloatingIPsAssociationTestJSON-1435038635-project-member] Lock "a674db15-ea4e-44c8-abc1-6af425286517" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.608s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 881.739617] env[62627]: DEBUG oslo_concurrency.lockutils [req-c47a41f1-973f-45b0-a4d5-21c84c374ffd req-3146cf88-26a2-4f11-bc7b-937531e72e94 service nova] Releasing lock "refresh_cache-faa16f10-63f0-457c-8cd7-20a624454d86" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 881.823484] env[62627]: DEBUG nova.compute.manager [req-d3794d0f-a561-4067-b1fe-37005b8aa070 req-e315de4a-2fa1-458c-9ce8-ab0cc8c429ec service nova] [instance: 7691c2e2-4e43-4990-bf4d-2ef3298a4264] Received event network-vif-deleted-abec21e1-204b-4ed0-801a-ab9f9bb8fcb9 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 881.825113] env[62627]: DEBUG nova.compute.manager [req-d3794d0f-a561-4067-b1fe-37005b8aa070 req-e315de4a-2fa1-458c-9ce8-ab0cc8c429ec service nova] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] Received event network-vif-deleted-bf72690a-0358-469d-8d21-85ef459cbcb9 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 881.825113] env[62627]: DEBUG nova.compute.manager [req-d3794d0f-a561-4067-b1fe-37005b8aa070 req-e315de4a-2fa1-458c-9ce8-ab0cc8c429ec service nova] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] Received event network-vif-plugged-b0e730ac-28a5-40f0-bbe1-c4f48f060624 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 881.825113] env[62627]: DEBUG oslo_concurrency.lockutils [req-d3794d0f-a561-4067-b1fe-37005b8aa070 req-e315de4a-2fa1-458c-9ce8-ab0cc8c429ec service nova] Acquiring lock "6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 881.825113] env[62627]: DEBUG oslo_concurrency.lockutils [req-d3794d0f-a561-4067-b1fe-37005b8aa070 req-e315de4a-2fa1-458c-9ce8-ab0cc8c429ec service nova] Lock "6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 881.825113] env[62627]: DEBUG oslo_concurrency.lockutils [req-d3794d0f-a561-4067-b1fe-37005b8aa070 req-e315de4a-2fa1-458c-9ce8-ab0cc8c429ec service nova] Lock "6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 881.825113] 
env[62627]: DEBUG nova.compute.manager [req-d3794d0f-a561-4067-b1fe-37005b8aa070 req-e315de4a-2fa1-458c-9ce8-ab0cc8c429ec service nova] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] No waiting events found dispatching network-vif-plugged-b0e730ac-28a5-40f0-bbe1-c4f48f060624 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 881.825113] env[62627]: WARNING nova.compute.manager [req-d3794d0f-a561-4067-b1fe-37005b8aa070 req-e315de4a-2fa1-458c-9ce8-ab0cc8c429ec service nova] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] Received unexpected event network-vif-plugged-b0e730ac-28a5-40f0-bbe1-c4f48f060624 for instance with vm_state building and task_state spawning. [ 881.825113] env[62627]: DEBUG nova.compute.manager [req-d3794d0f-a561-4067-b1fe-37005b8aa070 req-e315de4a-2fa1-458c-9ce8-ab0cc8c429ec service nova] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] Received event network-changed-b0e730ac-28a5-40f0-bbe1-c4f48f060624 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 881.825113] env[62627]: DEBUG nova.compute.manager [req-d3794d0f-a561-4067-b1fe-37005b8aa070 req-e315de4a-2fa1-458c-9ce8-ab0cc8c429ec service nova] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] Refreshing instance network info cache due to event network-changed-b0e730ac-28a5-40f0-bbe1-c4f48f060624. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 881.825510] env[62627]: DEBUG oslo_concurrency.lockutils [req-d3794d0f-a561-4067-b1fe-37005b8aa070 req-e315de4a-2fa1-458c-9ce8-ab0cc8c429ec service nova] Acquiring lock "refresh_cache-6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 881.836686] env[62627]: DEBUG oslo_concurrency.lockutils [None req-649b49bf-f015-4632-be3e-35fc5bfe58c9 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Acquiring lock "a397bce6-30c7-485d-81ab-88a8e021996c" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 881.836686] env[62627]: DEBUG oslo_concurrency.lockutils [None req-649b49bf-f015-4632-be3e-35fc5bfe58c9 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Lock "a397bce6-30c7-485d-81ab-88a8e021996c" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 881.836686] env[62627]: INFO nova.compute.manager [None req-649b49bf-f015-4632-be3e-35fc5bfe58c9 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Attaching volume 48409ca3-f1ae-40de-ad9d-766d53425c27 to /dev/sdb [ 881.840861] env[62627]: DEBUG nova.policy [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f9511c993533466faa59ca47d51b2964', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8fe72f9f87144786adbb917cf0da1b19', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': 
None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 881.859442] env[62627]: DEBUG oslo_vmware.api [None req-915706f7-2ffa-4080-b14c-12ff2136b601 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194382, 'name': ReconfigVM_Task, 'duration_secs': 0.372732} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.859755] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-915706f7-2ffa-4080-b14c-12ff2136b601 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] Reconfigured VM instance instance-00000027 to attach disk [datastore2] 1129349d-65db-4e1d-bb08-cea793adf996/d5497cf1-16e8-478a-a834-e9289b1751b7-rescue.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 881.860701] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5564f1f-2c2b-4a79-bb08-60117e42ae5e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.907723] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-438f1d01-881b-4284-a7b1-dde6687f3de0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.921170] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ecfcb7b-f4cb-41b0-92a0-571624b0d9f9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.932384] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dd74cad-3511-4784-9ae6-881136f21469 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.934979] env[62627]: DEBUG oslo_vmware.api [None req-915706f7-2ffa-4080-b14c-12ff2136b601 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Waiting for the task: (returnval){ [ 881.934979] env[62627]: value = "task-2194383" [ 881.934979] env[62627]: _type = "Task" [ 881.934979] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.946495] env[62627]: DEBUG oslo_vmware.api [None req-915706f7-2ffa-4080-b14c-12ff2136b601 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194383, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.948723] env[62627]: DEBUG nova.virt.block_device [None req-649b49bf-f015-4632-be3e-35fc5bfe58c9 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Updating existing volume attachment record: 47f657df-f340-43d8-92b1-1b0096a5bb90 {{(pid=62627) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 881.975082] env[62627]: DEBUG nova.compute.manager [None req-c78c57c4-3f40-4788-a7ae-32d7a8a29f92 tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Start destroying the instance on the hypervisor. {{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 881.976070] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c78c57c4-3f40-4788-a7ae-32d7a8a29f92 tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 881.976321] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1cc79297-9247-48f6-a865-8013b5aef85c tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquiring lock "refresh_cache-a12855e2-1544-4e85-a7ff-c026fec861b6" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 881.976569] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1cc79297-9247-48f6-a865-8013b5aef85c tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquired lock "refresh_cache-a12855e2-1544-4e85-a7ff-c026fec861b6" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 881.976687] env[62627]: DEBUG nova.network.neutron [None req-1cc79297-9247-48f6-a865-8013b5aef85c tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 881.978512] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4090545a-f872-49c5-8f6d-00113860d082 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.993693] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-c78c57c4-3f40-4788-a7ae-32d7a8a29f92 tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 881.994726] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-87ea1f4a-698a-4c78-8993-72cdbac4af88 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.004594] env[62627]: DEBUG oslo_vmware.api [None req-c78c57c4-3f40-4788-a7ae-32d7a8a29f92 tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Waiting for the task: 
(returnval){ [ 882.004594] env[62627]: value = "task-2194384" [ 882.004594] env[62627]: _type = "Task" [ 882.004594] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.016942] env[62627]: DEBUG oslo_vmware.api [None req-c78c57c4-3f40-4788-a7ae-32d7a8a29f92 tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Task: {'id': task-2194384, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.057163] env[62627]: DEBUG nova.network.neutron [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] Updating instance_info_cache with network_info: [{"id": "b0e730ac-28a5-40f0-bbe1-c4f48f060624", "address": "fa:16:3e:66:a3:d6", "network": {"id": "1e37275e-31eb-4a79-8ff1-1b105d16fe3d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-59116622-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7a97f57bdf4a46db80460b1d5c9f9b9f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f499bc9-78da-46c1-9274-19edf26d31cb", "external-id": "nsx-vlan-transportzone-243", "segmentation_id": 243, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0e730ac-28", "ovs_interfaceid": "b0e730ac-28a5-40f0-bbe1-c4f48f060624", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 882.141166] env[62627]: DEBUG nova.compute.manager [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 02216be1-cdb9-421a-8a23-4a919b376595] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 882.385303] env[62627]: DEBUG nova.network.neutron [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Successfully created port: e8a543dc-e58e-4c74-a763-b795643a2d53 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 882.449026] env[62627]: DEBUG oslo_vmware.api [None req-915706f7-2ffa-4080-b14c-12ff2136b601 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194383, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.520291] env[62627]: DEBUG oslo_vmware.api [None req-c78c57c4-3f40-4788-a7ae-32d7a8a29f92 tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Task: {'id': task-2194384, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.560195] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Releasing lock "refresh_cache-6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 882.562212] env[62627]: DEBUG nova.compute.manager [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] Instance network_info: |[{"id": "b0e730ac-28a5-40f0-bbe1-c4f48f060624", "address": "fa:16:3e:66:a3:d6", "network": {"id": "1e37275e-31eb-4a79-8ff1-1b105d16fe3d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-59116622-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7a97f57bdf4a46db80460b1d5c9f9b9f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f499bc9-78da-46c1-9274-19edf26d31cb", "external-id": "nsx-vlan-transportzone-243", "segmentation_id": 243, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0e730ac-28", "ovs_interfaceid": "b0e730ac-28a5-40f0-bbe1-c4f48f060624", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 882.562212] env[62627]: DEBUG oslo_concurrency.lockutils [req-d3794d0f-a561-4067-b1fe-37005b8aa070 req-e315de4a-2fa1-458c-9ce8-ab0cc8c429ec service nova] Acquired lock "refresh_cache-6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 882.562212] env[62627]: DEBUG nova.network.neutron [req-d3794d0f-a561-4067-b1fe-37005b8aa070 req-e315de4a-2fa1-458c-9ce8-ab0cc8c429ec service nova] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] Refreshing network info cache for port b0e730ac-28a5-40f0-bbe1-c4f48f060624 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 882.563149] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:66:a3:d6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3f499bc9-78da-46c1-9274-19edf26d31cb', 'network-type': 
'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b0e730ac-28a5-40f0-bbe1-c4f48f060624', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 882.573554] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Creating folder: Project (7a97f57bdf4a46db80460b1d5c9f9b9f). Parent ref: group-v447541. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 882.576157] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-30bd96ec-960c-4497-b8b8-6c4ac2a53b09 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.602416] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Created folder: Project (7a97f57bdf4a46db80460b1d5c9f9b9f) in parent group-v447541. [ 882.602416] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Creating folder: Instances. Parent ref: group-v447661. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 882.602416] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-88949ad4-423d-42c4-a0f1-83eec1010fbe {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.626732] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Created folder: Instances in parent group-v447661. [ 882.626732] env[62627]: DEBUG oslo.service.loopingcall [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 882.627146] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 882.627203] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-28217e7c-3bb8-4b6b-88df-cde6c111c961 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.688943] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 882.688943] env[62627]: value = "task-2194390" [ 882.688943] env[62627]: _type = "Task" [ 882.688943] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.702737] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194390, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.804721] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c86102a0-f6bf-4212-81b5-e02ff2598ac0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.814372] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2080fdd9-e0d0-4faf-a0f4-f8bf3a445e8d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.851803] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ad46798-88fa-43d6-918f-248802a0162c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.861822] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ee1e9c3-c7ba-4e31-abfd-bdb2457c5eff {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.879032] env[62627]: DEBUG nova.compute.provider_tree [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 882.947150] env[62627]: DEBUG oslo_vmware.api [None req-915706f7-2ffa-4080-b14c-12ff2136b601 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194383, 'name': ReconfigVM_Task, 'duration_secs': 0.958391} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.947566] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-915706f7-2ffa-4080-b14c-12ff2136b601 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 882.947818] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f868f739-d6fa-41ff-ac4e-4d9cf5def73e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.956514] env[62627]: DEBUG oslo_vmware.api [None req-915706f7-2ffa-4080-b14c-12ff2136b601 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Waiting for the task: (returnval){ [ 882.956514] env[62627]: value = "task-2194391" [ 882.956514] env[62627]: _type = "Task" [ 882.956514] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.965582] env[62627]: DEBUG oslo_vmware.api [None req-915706f7-2ffa-4080-b14c-12ff2136b601 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194391, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.018933] env[62627]: DEBUG oslo_vmware.api [None req-c78c57c4-3f40-4788-a7ae-32d7a8a29f92 tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Task: {'id': task-2194384, 'name': PowerOffVM_Task, 'duration_secs': 0.888353} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.019371] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-c78c57c4-3f40-4788-a7ae-32d7a8a29f92 tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 883.019483] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c78c57c4-3f40-4788-a7ae-32d7a8a29f92 tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 883.019680] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-54ae845c-be11-4c6d-ac45-09f1070b59ad {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.038300] env[62627]: DEBUG nova.network.neutron [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 02216be1-cdb9-421a-8a23-4a919b376595] Successfully created port: 40b19e0d-048f-4bca-bfea-412ad98b0568 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 883.085942] env[62627]: DEBUG oslo_concurrency.lockutils [None req-560d8d6b-edca-4ab2-af38-da0c63ca679e tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquiring lock "a12855e2-1544-4e85-a7ff-c026fec861b6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 883.086430] env[62627]: DEBUG oslo_concurrency.lockutils [None req-560d8d6b-edca-4ab2-af38-da0c63ca679e tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lock "a12855e2-1544-4e85-a7ff-c026fec861b6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 883.086785] env[62627]: DEBUG oslo_concurrency.lockutils [None req-560d8d6b-edca-4ab2-af38-da0c63ca679e tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquiring lock "a12855e2-1544-4e85-a7ff-c026fec861b6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 883.087293] env[62627]: DEBUG oslo_concurrency.lockutils [None req-560d8d6b-edca-4ab2-af38-da0c63ca679e tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lock "a12855e2-1544-4e85-a7ff-c026fec861b6-events" 
acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 883.087425] env[62627]: DEBUG oslo_concurrency.lockutils [None req-560d8d6b-edca-4ab2-af38-da0c63ca679e tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lock "a12855e2-1544-4e85-a7ff-c026fec861b6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 883.091125] env[62627]: INFO nova.compute.manager [None req-560d8d6b-edca-4ab2-af38-da0c63ca679e tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Terminating instance [ 883.099417] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c78c57c4-3f40-4788-a7ae-32d7a8a29f92 tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 883.099417] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c78c57c4-3f40-4788-a7ae-32d7a8a29f92 tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Deleting contents of the VM from datastore datastore1 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 883.099417] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-c78c57c4-3f40-4788-a7ae-32d7a8a29f92 tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Deleting the datastore file [datastore1] faa16f10-63f0-457c-8cd7-20a624454d86 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 883.099417] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-49217d8b-703b-45db-bb93-699eccac488d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.108670] env[62627]: DEBUG oslo_vmware.api [None req-c78c57c4-3f40-4788-a7ae-32d7a8a29f92 tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Waiting for the task: (returnval){ [ 883.108670] env[62627]: value = "task-2194393" [ 883.108670] env[62627]: _type = "Task" [ 883.108670] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.122319] env[62627]: DEBUG oslo_vmware.api [None req-c78c57c4-3f40-4788-a7ae-32d7a8a29f92 tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Task: {'id': task-2194393, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.168639] env[62627]: DEBUG nova.compute.manager [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 02216be1-cdb9-421a-8a23-4a919b376595] Start spawning the instance on the hypervisor. 
{{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 883.205870] env[62627]: DEBUG nova.virt.hardware [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 883.206132] env[62627]: DEBUG nova.virt.hardware [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 883.206287] env[62627]: DEBUG nova.virt.hardware [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 883.206496] env[62627]: DEBUG nova.virt.hardware [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 883.206660] env[62627]: DEBUG nova.virt.hardware [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 883.206803] env[62627]: DEBUG nova.virt.hardware [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 883.207021] env[62627]: DEBUG nova.virt.hardware [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 883.207192] env[62627]: DEBUG nova.virt.hardware [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 883.207356] env[62627]: DEBUG nova.virt.hardware [None 
req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 883.207516] env[62627]: DEBUG nova.virt.hardware [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 883.207692] env[62627]: DEBUG nova.virt.hardware [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 883.211193] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cf77952-cb8c-4a42-a0e8-7756444295ae {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.217730] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194390, 'name': CreateVM_Task, 'duration_secs': 0.461414} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.218202] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 883.219504] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 883.219993] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.219993] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 883.220308] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-69a03dbe-dc16-40bf-bf5a-48b212411e76 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.226693] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a6fa073-f79e-4200-92d6-1fcbd44c002b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.232844] env[62627]: DEBUG oslo_vmware.api [None 
req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 883.232844] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52accc7f-cd62-8d1a-fd11-2b15cfc47ab9" [ 883.232844] env[62627]: _type = "Task" [ 883.232844] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.252879] env[62627]: DEBUG oslo_vmware.api [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52accc7f-cd62-8d1a-fd11-2b15cfc47ab9, 'name': SearchDatastore_Task, 'duration_secs': 0.014168} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.253228] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 883.253484] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 883.253719] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 883.253867] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.254061] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 883.254481] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8bf011a1-f883-42c8-ad30-8f69fa0b1c82 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.274192] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) 
mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 883.274192] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 883.275740] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ec7e73a3-1249-4b25-ad22-f3f4a818441e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.283372] env[62627]: DEBUG oslo_vmware.api [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 883.283372] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52f19071-b965-1d76-681f-f529edaca21b" [ 883.283372] env[62627]: _type = "Task" [ 883.283372] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.293050] env[62627]: DEBUG oslo_vmware.api [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52f19071-b965-1d76-681f-f529edaca21b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.344625] env[62627]: INFO nova.network.neutron [None req-1cc79297-9247-48f6-a865-8013b5aef85c tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Port 202d765c-7217-40e5-9216-6ffe27003834 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
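The recurring sequence in the entries above and below — "Invoking <Object>.<Method>_Task" (service.py:371), "Waiting for the task: (returnval){ ... } to complete" (api.py:397), "Task: {...} progress is N%" (_poll_task, api.py:434), "completed successfully" (api.py:444) — is oslo.vmware's invoke-then-poll pattern. Below is a minimal sketch of how a caller such as the vmwareapi driver drives it; the function name and arguments are illustrative, while invoke_api() and wait_for_task() are the oslo.vmware session methods the log lines reference.

def power_off_and_wait(session, vm_ref):
    # Issue the SOAP call; this is what appears as
    # "Invoking VirtualMachine.PowerOffVM_Task with opID=..." and it returns a task reference.
    task_ref = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    # Block until the task finishes: wait_for_task() polls the task (the
    # "Task: {'id': ..., 'name': PowerOffVM_Task} progress is N%" lines) and
    # raises if the task ends in an error state.
    return session.wait_for_task(task_ref)

# A session would typically be built once per process, e.g. (parameter names indicative):
# from oslo_vmware import api
# session = api.VMwareAPISession('vc1.example.org', 'user', 'secret',
#                                api_retry_count=10, task_poll_interval=0.5)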
[ 883.344625] env[62627]: DEBUG nova.network.neutron [None req-1cc79297-9247-48f6-a865-8013b5aef85c tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Updating instance_info_cache with network_info: [{"id": "21d30d49-10fb-462d-9b5f-7525c3497178", "address": "fa:16:3e:9b:65:45", "network": {"id": "a1c5c227-8f1f-4881-a6d8-f446b64d9846", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1052922661-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.251", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e4f7ad3ebcf4c4d97bdbce58eec5a09", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "023d6500-887e-4dc4-bec5-06b40450d9c0", "external-id": "nsx-vlan-transportzone-108", "segmentation_id": 108, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap21d30d49-10", "ovs_interfaceid": "21d30d49-10fb-462d-9b5f-7525c3497178", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 883.383377] env[62627]: DEBUG nova.scheduler.client.report [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 883.472328] env[62627]: DEBUG oslo_vmware.api [None req-915706f7-2ffa-4080-b14c-12ff2136b601 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194391, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.595966] env[62627]: DEBUG nova.compute.manager [None req-560d8d6b-edca-4ab2-af38-da0c63ca679e tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Start destroying the instance on the hypervisor. 
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 883.596319] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-560d8d6b-edca-4ab2-af38-da0c63ca679e tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 883.597662] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-532feedd-79d9-4af4-a19d-8db8e62afcc2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.609585] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-560d8d6b-edca-4ab2-af38-da0c63ca679e tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 883.609585] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-73b49dcf-a513-420d-abf3-22ec34364204 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.622624] env[62627]: DEBUG oslo_vmware.api [None req-c78c57c4-3f40-4788-a7ae-32d7a8a29f92 tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Task: {'id': task-2194393, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.317283} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.624542] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-c78c57c4-3f40-4788-a7ae-32d7a8a29f92 tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 883.624750] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c78c57c4-3f40-4788-a7ae-32d7a8a29f92 tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Deleted contents of the VM from datastore datastore1 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 883.624928] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c78c57c4-3f40-4788-a7ae-32d7a8a29f92 tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 883.625136] env[62627]: INFO nova.compute.manager [None req-c78c57c4-3f40-4788-a7ae-32d7a8a29f92 tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Took 1.65 seconds to destroy the instance on the hypervisor. [ 883.625448] env[62627]: DEBUG oslo.service.loopingcall [None req-c78c57c4-3f40-4788-a7ae-32d7a8a29f92 tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 883.625733] env[62627]: DEBUG oslo_vmware.api [None req-560d8d6b-edca-4ab2-af38-da0c63ca679e tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Waiting for the task: (returnval){ [ 883.625733] env[62627]: value = "task-2194394" [ 883.625733] env[62627]: _type = "Task" [ 883.625733] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.625929] env[62627]: DEBUG nova.compute.manager [-] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 883.626038] env[62627]: DEBUG nova.network.neutron [-] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 883.645031] env[62627]: DEBUG oslo_vmware.api [None req-560d8d6b-edca-4ab2-af38-da0c63ca679e tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2194394, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.797931] env[62627]: DEBUG oslo_vmware.api [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52f19071-b965-1d76-681f-f529edaca21b, 'name': SearchDatastore_Task, 'duration_secs': 0.022904} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.798613] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3f528f08-efae-4c01-891a-9013231506af {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.806885] env[62627]: DEBUG oslo_vmware.api [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 883.806885] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]525ab457-b041-ef3c-5c12-2112fc983058" [ 883.806885] env[62627]: _type = "Task" [ 883.806885] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.818690] env[62627]: DEBUG oslo_vmware.api [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]525ab457-b041-ef3c-5c12-2112fc983058, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.847737] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1cc79297-9247-48f6-a865-8013b5aef85c tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Releasing lock "refresh_cache-a12855e2-1544-4e85-a7ff-c026fec861b6" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 883.894357] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.760s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 883.895215] env[62627]: DEBUG nova.compute.manager [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: f1b70379-e13d-4007-ac2d-3d2d43cd5602] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 883.897833] env[62627]: DEBUG oslo_concurrency.lockutils [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.731s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 883.901886] env[62627]: INFO nova.compute.claims [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 883.970347] env[62627]: DEBUG oslo_vmware.api [None req-915706f7-2ffa-4080-b14c-12ff2136b601 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194391, 'name': PowerOnVM_Task, 'duration_secs': 0.51892} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.970472] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-915706f7-2ffa-4080-b14c-12ff2136b601 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 883.974927] env[62627]: DEBUG nova.compute.manager [None req-915706f7-2ffa-4080-b14c-12ff2136b601 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 883.975988] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f212c4ec-21f2-4a0a-b906-04676d9576d4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.062812] env[62627]: DEBUG nova.network.neutron [req-d3794d0f-a561-4067-b1fe-37005b8aa070 req-e315de4a-2fa1-458c-9ce8-ab0cc8c429ec service nova] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] Updated VIF entry in instance network info cache for port b0e730ac-28a5-40f0-bbe1-c4f48f060624. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 884.063375] env[62627]: DEBUG nova.network.neutron [req-d3794d0f-a561-4067-b1fe-37005b8aa070 req-e315de4a-2fa1-458c-9ce8-ab0cc8c429ec service nova] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] Updating instance_info_cache with network_info: [{"id": "b0e730ac-28a5-40f0-bbe1-c4f48f060624", "address": "fa:16:3e:66:a3:d6", "network": {"id": "1e37275e-31eb-4a79-8ff1-1b105d16fe3d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-59116622-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7a97f57bdf4a46db80460b1d5c9f9b9f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f499bc9-78da-46c1-9274-19edf26d31cb", "external-id": "nsx-vlan-transportzone-243", "segmentation_id": 243, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0e730ac-28", "ovs_interfaceid": "b0e730ac-28a5-40f0-bbe1-c4f48f060624", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 884.139428] env[62627]: DEBUG oslo_vmware.api [None req-560d8d6b-edca-4ab2-af38-da0c63ca679e tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2194394, 'name': PowerOffVM_Task, 'duration_secs': 0.324633} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.139662] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-560d8d6b-edca-4ab2-af38-da0c63ca679e tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 884.139806] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-560d8d6b-edca-4ab2-af38-da0c63ca679e tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 884.140082] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-70eaec26-f5e9-4e9d-84a2-a63cd12fe1c7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.238396] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-560d8d6b-edca-4ab2-af38-da0c63ca679e tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 884.238396] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-560d8d6b-edca-4ab2-af38-da0c63ca679e tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Deleting contents of the VM from datastore datastore1 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 884.238396] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-560d8d6b-edca-4ab2-af38-da0c63ca679e tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Deleting the datastore file [datastore1] a12855e2-1544-4e85-a7ff-c026fec861b6 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 884.239460] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f4657067-88c4-4697-a438-cc1b2db2a361 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.250121] env[62627]: DEBUG oslo_vmware.api [None req-560d8d6b-edca-4ab2-af38-da0c63ca679e tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Waiting for the task: (returnval){ [ 884.250121] env[62627]: value = "task-2194396" [ 884.250121] env[62627]: _type = "Task" [ 884.250121] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.265187] env[62627]: DEBUG oslo_vmware.api [None req-560d8d6b-edca-4ab2-af38-da0c63ca679e tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2194396, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.322808] env[62627]: DEBUG oslo_vmware.api [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]525ab457-b041-ef3c-5c12-2112fc983058, 'name': SearchDatastore_Task, 'duration_secs': 0.027745} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.323204] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 884.323573] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a/6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 884.323930] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9859b57c-8f27-40eb-82ca-224ee2d5f978 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.334585] env[62627]: DEBUG oslo_vmware.api [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 884.334585] env[62627]: value = "task-2194397" [ 884.334585] env[62627]: _type = "Task" [ 884.334585] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.350903] env[62627]: DEBUG oslo_vmware.api [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2194397, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.353330] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1cc79297-9247-48f6-a865-8013b5aef85c tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lock "interface-a12855e2-1544-4e85-a7ff-c026fec861b6-202d765c-7217-40e5-9216-6ffe27003834" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 11.380s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 884.413330] env[62627]: DEBUG nova.compute.utils [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 884.417715] env[62627]: DEBUG nova.compute.manager [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: f1b70379-e13d-4007-ac2d-3d2d43cd5602] Allocating IP information in the background. {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 884.417979] env[62627]: DEBUG nova.network.neutron [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: f1b70379-e13d-4007-ac2d-3d2d43cd5602] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 884.530763] env[62627]: DEBUG nova.policy [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f9511c993533466faa59ca47d51b2964', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8fe72f9f87144786adbb917cf0da1b19', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 884.566269] env[62627]: DEBUG oslo_concurrency.lockutils [req-d3794d0f-a561-4067-b1fe-37005b8aa070 req-e315de4a-2fa1-458c-9ce8-ab0cc8c429ec service nova] Releasing lock "refresh_cache-6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 884.768052] env[62627]: DEBUG oslo_vmware.api [None req-560d8d6b-edca-4ab2-af38-da0c63ca679e tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2194396, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.354022} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.768554] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-560d8d6b-edca-4ab2-af38-da0c63ca679e tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 884.768924] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-560d8d6b-edca-4ab2-af38-da0c63ca679e tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Deleted contents of the VM from datastore datastore1 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 884.770846] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-560d8d6b-edca-4ab2-af38-da0c63ca679e tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 884.770846] env[62627]: INFO nova.compute.manager [None req-560d8d6b-edca-4ab2-af38-da0c63ca679e tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Took 1.17 seconds to destroy the instance on the hypervisor. [ 884.770846] env[62627]: DEBUG oslo.service.loopingcall [None req-560d8d6b-edca-4ab2-af38-da0c63ca679e tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 884.770846] env[62627]: DEBUG nova.compute.manager [-] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 884.770846] env[62627]: DEBUG nova.network.neutron [-] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 884.850456] env[62627]: DEBUG oslo_vmware.api [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2194397, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.924465] env[62627]: DEBUG nova.compute.manager [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: f1b70379-e13d-4007-ac2d-3d2d43cd5602] Start building block device mappings for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 885.018384] env[62627]: DEBUG nova.compute.manager [req-47302db0-b292-4319-a28a-d0cdd3650533 req-5b2663fa-b459-46e7-9b65-9e6a219ab86e service nova] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Received event network-vif-deleted-202d765c-7217-40e5-9216-6ffe27003834 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 885.348950] env[62627]: DEBUG oslo_vmware.api [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2194397, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.694175} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.349309] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a/6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 885.350120] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 885.352403] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c8997c22-9318-4e67-a452-9d2ced14cffb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.361152] env[62627]: DEBUG oslo_vmware.api [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 885.361152] env[62627]: value = "task-2194399" [ 885.361152] env[62627]: _type = "Task" [ 885.361152] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.371425] env[62627]: DEBUG oslo_vmware.api [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2194399, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.558636] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd3940c5-b406-4a61-8360-c978e2b541e4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.569792] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c605541-7f42-4cb1-b89a-9512907e5e61 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.615748] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b56102d0-c2b1-46a9-b60b-39cb92339ad9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.627637] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8889cca-4faf-4d92-931b-a46dcd30fd11 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.644054] env[62627]: DEBUG nova.compute.provider_tree [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 885.648533] env[62627]: DEBUG nova.network.neutron [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Successfully updated port: 6bf6cde4-6fe7-4f05-a8ed-b57e621b565b {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 885.648533] env[62627]: DEBUG nova.network.neutron [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: f1b70379-e13d-4007-ac2d-3d2d43cd5602] Successfully created port: 93a8b363-e41a-4ac2-abf1-274395673a5b {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 885.875233] env[62627]: DEBUG oslo_vmware.api [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2194399, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.132179} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.875233] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 885.876789] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a86a096-08ed-45a0-bbf7-cda3c34c8796 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.906951] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] Reconfiguring VM instance instance-00000029 to attach disk [datastore2] 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a/6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 885.907169] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f2332d87-1d5a-4b63-b2f3-ee6fdc884fb2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.931125] env[62627]: DEBUG oslo_vmware.api [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 885.931125] env[62627]: value = "task-2194400" [ 885.931125] env[62627]: _type = "Task" [ 885.931125] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.942799] env[62627]: DEBUG nova.compute.manager [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: f1b70379-e13d-4007-ac2d-3d2d43cd5602] Start spawning the instance on the hypervisor. {{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 885.948025] env[62627]: DEBUG oslo_vmware.api [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2194400, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.960652] env[62627]: DEBUG nova.network.neutron [-] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 885.984458] env[62627]: DEBUG nova.virt.hardware [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 885.984730] env[62627]: DEBUG nova.virt.hardware [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 885.984891] env[62627]: DEBUG nova.virt.hardware [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 885.985093] env[62627]: DEBUG nova.virt.hardware [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 885.985292] env[62627]: DEBUG nova.virt.hardware [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 885.986533] env[62627]: DEBUG nova.virt.hardware [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 885.986697] env[62627]: DEBUG nova.virt.hardware [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 885.986742] env[62627]: DEBUG nova.virt.hardware [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 
tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 885.986918] env[62627]: DEBUG nova.virt.hardware [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 885.987148] env[62627]: DEBUG nova.virt.hardware [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 885.987319] env[62627]: DEBUG nova.virt.hardware [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 885.988573] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0719e8ee-88d1-4acf-85f4-05a66f298d4e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.998365] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84fe7d68-c6ff-465e-9b20-ba75476477d0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.096236] env[62627]: DEBUG oslo_vmware.rw_handles [None req-802f837f-ac55-4bd1-9a7d-149642b4d044 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52c70b21-ec9c-2fcd-e5ea-62154534973d/disk-0.vmdk. {{(pid=62627) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 886.097407] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37b6ba44-4a73-43d7-b602-eefa9bf4bec0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.108549] env[62627]: DEBUG oslo_vmware.rw_handles [None req-802f837f-ac55-4bd1-9a7d-149642b4d044 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52c70b21-ec9c-2fcd-e5ea-62154534973d/disk-0.vmdk is in state: ready. {{(pid=62627) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 886.108549] env[62627]: ERROR oslo_vmware.rw_handles [None req-802f837f-ac55-4bd1-9a7d-149642b4d044 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52c70b21-ec9c-2fcd-e5ea-62154534973d/disk-0.vmdk due to incomplete transfer. 
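
[editor's note] The nova.virt.hardware entries just above ("Build topologies for 1 vcpu(s) 1:1:1", "Got 1 possible topologies", "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]") record nova enumerating CPU topologies that fit the flavor's vCPU count within the sockets/cores/threads limits. The following is a minimal stand-alone sketch of that enumeration idea, assuming a simplified model; it is not the actual nova.virt.hardware code.

    # Illustrative sketch only: enumerate (sockets, cores, threads) triples
    # whose product equals the flavor's vCPU count, within the given limits.
    from collections import namedtuple

    VirtCPUTopology = namedtuple("VirtCPUTopology", ["sockets", "cores", "threads"])

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Yield every topology whose sockets*cores*threads == vcpus."""
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            per_socket = vcpus // sockets
            for cores in range(1, min(per_socket, max_cores) + 1):
                if per_socket % cores:
                    continue
                threads = per_socket // cores
                if threads <= max_threads:
                    yield VirtCPUTopology(sockets, cores, threads)

    # For the 1-vCPU m1.nano flavor in the log, only 1:1:1 fits:
    print(list(possible_topologies(1)))
    # [VirtCPUTopology(sockets=1, cores=1, threads=1)]

For a 4-vCPU flavor the same loop would yield several candidates (e.g. 1:4:1, 2:2:1, 4:1:1), which the real code then filters and sorts by flavor/image preferences before picking one.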
[ 886.108549] env[62627]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-30e599f3-4bfd-4aec-8106-6df341aef4d0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.115892] env[62627]: DEBUG oslo_vmware.rw_handles [None req-802f837f-ac55-4bd1-9a7d-149642b4d044 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52c70b21-ec9c-2fcd-e5ea-62154534973d/disk-0.vmdk. {{(pid=62627) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 886.116183] env[62627]: DEBUG nova.virt.vmwareapi.images [None req-802f837f-ac55-4bd1-9a7d-149642b4d044 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: aed62160-2db5-4008-a491-2b27ae81ef5f] Uploaded image b6b3c30e-e15d-434a-aa3a-097b0c9be220 to the Glance image server {{(pid=62627) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 886.118476] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-802f837f-ac55-4bd1-9a7d-149642b4d044 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: aed62160-2db5-4008-a491-2b27ae81ef5f] Destroying the VM {{(pid=62627) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 886.119163] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-ccd24602-c14e-4965-a486-1d8cbc3daffe {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.134015] env[62627]: DEBUG oslo_vmware.api [None req-802f837f-ac55-4bd1-9a7d-149642b4d044 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Waiting for the task: (returnval){ [ 886.134015] env[62627]: value = "task-2194401" [ 886.134015] env[62627]: _type = "Task" [ 886.134015] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.148974] env[62627]: DEBUG oslo_vmware.api [None req-802f837f-ac55-4bd1-9a7d-149642b4d044 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194401, 'name': Destroy_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.148974] env[62627]: DEBUG nova.scheduler.client.report [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 886.279582] env[62627]: DEBUG nova.network.neutron [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 02216be1-cdb9-421a-8a23-4a919b376595] Successfully updated port: 40b19e0d-048f-4bca-bfea-412ad98b0568 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 886.442186] env[62627]: DEBUG oslo_vmware.api [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2194400, 'name': ReconfigVM_Task, 'duration_secs': 0.35472} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.442551] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] Reconfigured VM instance instance-00000029 to attach disk [datastore2] 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a/6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 886.443229] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8677716f-d371-43b4-b64e-044efb4bc5e4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.451180] env[62627]: DEBUG oslo_vmware.api [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 886.451180] env[62627]: value = "task-2194402" [ 886.451180] env[62627]: _type = "Task" [ 886.451180] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.460869] env[62627]: DEBUG oslo_vmware.api [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2194402, 'name': Rename_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.467185] env[62627]: INFO nova.compute.manager [-] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Took 2.84 seconds to deallocate network for instance. 
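
[editor's note] The scheduler report entry above lists the inventory placement holds for provider c17e7f32-68d8-4e2f-8979-15d701de02f7: totals, reserved amounts, and allocation ratios per resource class. In the usual placement model the effective capacity exposed for scheduling is (total - reserved) * allocation_ratio; a short worked sketch using the numbers from that log entry (illustrative only):

    # Worked example of the usual placement capacity formula, using the
    # inventory values reported for provider c17e7f32-... in the log above.
    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(f"{rc}: {capacity:g}")
    # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400

So with a 4.0 VCPU allocation ratio this 48-core node can hold up to 192 vCPUs of allocations, which is why the claim for instance e76bdb57-4a30-4222-ad4c-d95e4e6193cb below succeeds without the inventory itself changing.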
[ 886.644415] env[62627]: DEBUG oslo_vmware.api [None req-802f837f-ac55-4bd1-9a7d-149642b4d044 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194401, 'name': Destroy_Task} progress is 33%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.658017] env[62627]: DEBUG oslo_concurrency.lockutils [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.758s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 886.658017] env[62627]: DEBUG nova.compute.manager [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 886.659779] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f74ab9a9-2e7b-4cbe-9e9f-bdbc2edd0063 tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.358s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 886.660481] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f74ab9a9-2e7b-4cbe-9e9f-bdbc2edd0063 tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 886.665043] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.141s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 886.665043] env[62627]: INFO nova.compute.claims [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 886.707147] env[62627]: DEBUG nova.network.neutron [-] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 886.708447] env[62627]: INFO nova.scheduler.client.report [None req-f74ab9a9-2e7b-4cbe-9e9f-bdbc2edd0063 tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Deleted allocations for instance e5712962-ad8e-4cf2-aa47-275293609a13 [ 886.783644] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Acquiring lock "refresh_cache-02216be1-cdb9-421a-8a23-4a919b376595" 
{{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 886.783820] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Acquired lock "refresh_cache-02216be1-cdb9-421a-8a23-4a919b376595" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 886.784415] env[62627]: DEBUG nova.network.neutron [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 02216be1-cdb9-421a-8a23-4a919b376595] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 886.867092] env[62627]: DEBUG oslo_concurrency.lockutils [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Acquiring lock "66882603-6ff6-49c2-9f42-3521ecaf98b2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 886.869065] env[62627]: DEBUG oslo_concurrency.lockutils [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Lock "66882603-6ff6-49c2-9f42-3521ecaf98b2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 886.964393] env[62627]: DEBUG oslo_vmware.api [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2194402, 'name': Rename_Task, 'duration_secs': 0.154386} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.964393] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 886.964393] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3a9fdf93-5692-4b8e-8b9a-0a17e5d46fb9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.971690] env[62627]: DEBUG oslo_vmware.api [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 886.971690] env[62627]: value = "task-2194403" [ 886.971690] env[62627]: _type = "Task" [ 886.971690] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.976222] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c78c57c4-3f40-4788-a7ae-32d7a8a29f92 tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 886.982462] env[62627]: DEBUG oslo_vmware.api [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2194403, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.018202] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-649b49bf-f015-4632-be3e-35fc5bfe58c9 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Volume attach. Driver type: vmdk {{(pid=62627) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 887.018389] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-649b49bf-f015-4632-be3e-35fc5bfe58c9 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447660', 'volume_id': '48409ca3-f1ae-40de-ad9d-766d53425c27', 'name': 'volume-48409ca3-f1ae-40de-ad9d-766d53425c27', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a397bce6-30c7-485d-81ab-88a8e021996c', 'attached_at': '', 'detached_at': '', 'volume_id': '48409ca3-f1ae-40de-ad9d-766d53425c27', 'serial': '48409ca3-f1ae-40de-ad9d-766d53425c27'} {{(pid=62627) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 887.020103] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b662397-69c2-43ad-9f9a-98f02a848e4d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.044775] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb4eda86-6450-4275-b64d-ee7476e2856f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.078125] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-649b49bf-f015-4632-be3e-35fc5bfe58c9 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Reconfiguring VM instance instance-00000010 to attach disk [datastore2] volume-48409ca3-f1ae-40de-ad9d-766d53425c27/volume-48409ca3-f1ae-40de-ad9d-766d53425c27.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 887.078125] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ef97267d-46e6-49dc-923c-e9782bc15425 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.100505] env[62627]: DEBUG 
oslo_vmware.api [None req-649b49bf-f015-4632-be3e-35fc5bfe58c9 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Waiting for the task: (returnval){ [ 887.100505] env[62627]: value = "task-2194404" [ 887.100505] env[62627]: _type = "Task" [ 887.100505] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.108819] env[62627]: DEBUG oslo_vmware.api [None req-649b49bf-f015-4632-be3e-35fc5bfe58c9 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Task: {'id': task-2194404, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.144876] env[62627]: DEBUG oslo_vmware.api [None req-802f837f-ac55-4bd1-9a7d-149642b4d044 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194401, 'name': Destroy_Task, 'duration_secs': 0.722965} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.145612] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-802f837f-ac55-4bd1-9a7d-149642b4d044 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: aed62160-2db5-4008-a491-2b27ae81ef5f] Destroyed the VM [ 887.145916] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-802f837f-ac55-4bd1-9a7d-149642b4d044 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: aed62160-2db5-4008-a491-2b27ae81ef5f] Deleting Snapshot of the VM instance {{(pid=62627) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 887.146206] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-b4f070ba-2bb7-472b-b558-053065372cfd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.153625] env[62627]: DEBUG oslo_vmware.api [None req-802f837f-ac55-4bd1-9a7d-149642b4d044 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Waiting for the task: (returnval){ [ 887.153625] env[62627]: value = "task-2194405" [ 887.153625] env[62627]: _type = "Task" [ 887.153625] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.162271] env[62627]: DEBUG oslo_vmware.api [None req-802f837f-ac55-4bd1-9a7d-149642b4d044 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194405, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.170134] env[62627]: DEBUG nova.compute.utils [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 887.173300] env[62627]: DEBUG nova.compute.manager [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Allocating IP information in the background. 
{{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 887.173503] env[62627]: DEBUG nova.network.neutron [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 887.213133] env[62627]: INFO nova.compute.manager [-] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Took 2.44 seconds to deallocate network for instance. [ 887.237542] env[62627]: DEBUG nova.policy [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0896d768b334478bb19804bbb156f4bb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e4db2144e94840c4a422d2d933b4a65f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 887.240844] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f74ab9a9-2e7b-4cbe-9e9f-bdbc2edd0063 tempest-ServersTestBootFromVolume-172209119 tempest-ServersTestBootFromVolume-172209119-project-member] Lock "e5712962-ad8e-4cf2-aa47-275293609a13" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.747s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 887.343326] env[62627]: DEBUG nova.network.neutron [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 02216be1-cdb9-421a-8a23-4a919b376595] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 887.355141] env[62627]: DEBUG nova.compute.manager [req-91f46b3e-a634-4cad-8c3f-4cf7daaca42c req-e37e6801-2d98-4b23-8440-c1340c8c07f0 service nova] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Received event network-vif-deleted-21d30d49-10fb-462d-9b5f-7525c3497178 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 887.486348] env[62627]: DEBUG oslo_vmware.api [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2194403, 'name': PowerOnVM_Task} progress is 94%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.514953] env[62627]: DEBUG oslo_concurrency.lockutils [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Acquiring lock "38ab16cc-be4f-47b4-a818-ea864e853872" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 887.515304] env[62627]: DEBUG oslo_concurrency.lockutils [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Lock "38ab16cc-be4f-47b4-a818-ea864e853872" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 887.578143] env[62627]: DEBUG nova.compute.manager [req-db335205-642e-4579-b861-4dfcfe9753ea req-5df3aa70-397a-419d-a84b-341c3930f9d6 service nova] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Received event network-vif-plugged-6bf6cde4-6fe7-4f05-a8ed-b57e621b565b {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 887.578398] env[62627]: DEBUG oslo_concurrency.lockutils [req-db335205-642e-4579-b861-4dfcfe9753ea req-5df3aa70-397a-419d-a84b-341c3930f9d6 service nova] Acquiring lock "574293e3-8ec8-410f-8a30-83bc60d25d64-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 887.578570] env[62627]: DEBUG oslo_concurrency.lockutils [req-db335205-642e-4579-b861-4dfcfe9753ea req-5df3aa70-397a-419d-a84b-341c3930f9d6 service nova] Lock "574293e3-8ec8-410f-8a30-83bc60d25d64-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 887.578734] env[62627]: DEBUG oslo_concurrency.lockutils [req-db335205-642e-4579-b861-4dfcfe9753ea req-5df3aa70-397a-419d-a84b-341c3930f9d6 service nova] Lock "574293e3-8ec8-410f-8a30-83bc60d25d64-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 887.579019] env[62627]: DEBUG nova.compute.manager [req-db335205-642e-4579-b861-4dfcfe9753ea req-5df3aa70-397a-419d-a84b-341c3930f9d6 service nova] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] No waiting events found dispatching network-vif-plugged-6bf6cde4-6fe7-4f05-a8ed-b57e621b565b {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 887.579156] env[62627]: WARNING nova.compute.manager [req-db335205-642e-4579-b861-4dfcfe9753ea req-5df3aa70-397a-419d-a84b-341c3930f9d6 service nova] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Received unexpected event network-vif-plugged-6bf6cde4-6fe7-4f05-a8ed-b57e621b565b for instance with vm_state building and task_state spawning. 
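
[editor's note] The req-db335205 entries above show the compute manager taking the per-instance "574293e3-...-events" lock, popping a waiter for network-vif-plugged-6bf6cde4-..., finding none registered, and logging the event as unexpected while the instance is still building/spawning. Below is a minimal sketch of that "expected external event" pattern using a hypothetical waiter registry; it is a deliberate simplification, not nova's actual InstanceEvents implementation.

    # Illustrative sketch of an expected-event registry, loosely modeled on
    # the pop_instance_event behaviour in the log above (hypothetical code).
    import threading

    class EventWaiters:
        def __init__(self):
            self._lock = threading.Lock()   # plays the role of the "<uuid>-events" lock
            self._waiters = {}              # (instance_uuid, event_name) -> threading.Event

        def prepare(self, instance_uuid, event_name):
            """Register interest in an event before triggering the external action."""
            ev = threading.Event()
            with self._lock:
                self._waiters[(instance_uuid, event_name)] = ev
            return ev

        def pop(self, instance_uuid, event_name):
            """Called when an external event arrives; returns the waiter, or None."""
            with self._lock:
                return self._waiters.pop((instance_uuid, event_name), None)

    waiters = EventWaiters()
    incoming = ("574293e3-8ec8-410f-8a30-83bc60d25d64", "network-vif-plugged-6bf6cde4")
    waiter = waiters.pop(*incoming)
    if waiter is None:
        print("No waiting events found; received unexpected event", incoming[1])
    else:
        waiter.set()  # wake whichever thread called prepare() and is blocked on the event

In the log no waiter was prepared for this port (the spawn path had not yet asked to wait on it), so the event falls through to the WARNING branch rather than waking a blocked thread.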
[ 887.579504] env[62627]: DEBUG nova.compute.manager [req-db335205-642e-4579-b861-4dfcfe9753ea req-5df3aa70-397a-419d-a84b-341c3930f9d6 service nova] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Received event network-vif-deleted-5db92929-a96d-41c2-be2b-95c9ef62c4bf {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 887.579504] env[62627]: DEBUG nova.compute.manager [req-db335205-642e-4579-b861-4dfcfe9753ea req-5df3aa70-397a-419d-a84b-341c3930f9d6 service nova] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Received event network-changed-6bf6cde4-6fe7-4f05-a8ed-b57e621b565b {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 887.579775] env[62627]: DEBUG nova.compute.manager [req-db335205-642e-4579-b861-4dfcfe9753ea req-5df3aa70-397a-419d-a84b-341c3930f9d6 service nova] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Refreshing instance network info cache due to event network-changed-6bf6cde4-6fe7-4f05-a8ed-b57e621b565b. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 887.579903] env[62627]: DEBUG oslo_concurrency.lockutils [req-db335205-642e-4579-b861-4dfcfe9753ea req-5df3aa70-397a-419d-a84b-341c3930f9d6 service nova] Acquiring lock "refresh_cache-574293e3-8ec8-410f-8a30-83bc60d25d64" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 887.579988] env[62627]: DEBUG oslo_concurrency.lockutils [req-db335205-642e-4579-b861-4dfcfe9753ea req-5df3aa70-397a-419d-a84b-341c3930f9d6 service nova] Acquired lock "refresh_cache-574293e3-8ec8-410f-8a30-83bc60d25d64" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 887.580120] env[62627]: DEBUG nova.network.neutron [req-db335205-642e-4579-b861-4dfcfe9753ea req-5df3aa70-397a-419d-a84b-341c3930f9d6 service nova] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Refreshing network info cache for port 6bf6cde4-6fe7-4f05-a8ed-b57e621b565b {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 887.587308] env[62627]: DEBUG nova.network.neutron [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 02216be1-cdb9-421a-8a23-4a919b376595] Updating instance_info_cache with network_info: [{"id": "40b19e0d-048f-4bca-bfea-412ad98b0568", "address": "fa:16:3e:05:60:d8", "network": {"id": "529dc9a6-8843-4139-a315-ff9ae56f4d81", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1514488580-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8fe72f9f87144786adbb917cf0da1b19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1eed7865-f9d8-463e-843f-3b0b3a962a2c", "external-id": "nsx-vlan-transportzone-852", "segmentation_id": 852, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap40b19e0d-04", "ovs_interfaceid": "40b19e0d-048f-4bca-bfea-412ad98b0568", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": 
false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 887.609954] env[62627]: DEBUG oslo_vmware.api [None req-649b49bf-f015-4632-be3e-35fc5bfe58c9 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Task: {'id': task-2194404, 'name': ReconfigVM_Task, 'duration_secs': 0.477182} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.610217] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-649b49bf-f015-4632-be3e-35fc5bfe58c9 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Reconfigured VM instance instance-00000010 to attach disk [datastore2] volume-48409ca3-f1ae-40de-ad9d-766d53425c27/volume-48409ca3-f1ae-40de-ad9d-766d53425c27.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 887.615108] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-03e04efc-c0d0-474e-a28b-866b053572c5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.631118] env[62627]: DEBUG oslo_vmware.api [None req-649b49bf-f015-4632-be3e-35fc5bfe58c9 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Waiting for the task: (returnval){ [ 887.631118] env[62627]: value = "task-2194406" [ 887.631118] env[62627]: _type = "Task" [ 887.631118] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.640074] env[62627]: DEBUG oslo_vmware.api [None req-649b49bf-f015-4632-be3e-35fc5bfe58c9 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Task: {'id': task-2194406, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.665691] env[62627]: DEBUG oslo_vmware.api [None req-802f837f-ac55-4bd1-9a7d-149642b4d044 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194405, 'name': RemoveSnapshot_Task} progress is 31%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.674448] env[62627]: DEBUG nova.compute.manager [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Start building block device mappings for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 887.729507] env[62627]: DEBUG oslo_concurrency.lockutils [None req-560d8d6b-edca-4ab2-af38-da0c63ca679e tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 887.898694] env[62627]: DEBUG oslo_concurrency.lockutils [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Acquiring lock "4e65e39e-3b0b-4c71-87d7-e1ed663ada4a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 887.898816] env[62627]: DEBUG oslo_concurrency.lockutils [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Lock "4e65e39e-3b0b-4c71-87d7-e1ed663ada4a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 887.930653] env[62627]: DEBUG nova.network.neutron [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Successfully created port: 4a5f7c0b-f029-481c-a96f-0d9789d6bad5 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 887.986459] env[62627]: DEBUG oslo_vmware.api [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2194403, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.036255] env[62627]: DEBUG nova.network.neutron [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: f1b70379-e13d-4007-ac2d-3d2d43cd5602] Successfully updated port: 93a8b363-e41a-4ac2-abf1-274395673a5b {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 888.088863] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Releasing lock "refresh_cache-02216be1-cdb9-421a-8a23-4a919b376595" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 888.089191] env[62627]: DEBUG nova.compute.manager [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 02216be1-cdb9-421a-8a23-4a919b376595] Instance network_info: |[{"id": "40b19e0d-048f-4bca-bfea-412ad98b0568", "address": "fa:16:3e:05:60:d8", "network": {"id": "529dc9a6-8843-4139-a315-ff9ae56f4d81", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1514488580-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8fe72f9f87144786adbb917cf0da1b19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1eed7865-f9d8-463e-843f-3b0b3a962a2c", "external-id": "nsx-vlan-transportzone-852", "segmentation_id": 852, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap40b19e0d-04", "ovs_interfaceid": "40b19e0d-048f-4bca-bfea-412ad98b0568", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 888.092238] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 02216be1-cdb9-421a-8a23-4a919b376595] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:05:60:d8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1eed7865-f9d8-463e-843f-3b0b3a962a2c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '40b19e0d-048f-4bca-bfea-412ad98b0568', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 888.100879] env[62627]: DEBUG oslo.service.loopingcall [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 888.101525] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 02216be1-cdb9-421a-8a23-4a919b376595] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 888.101767] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5d9fa324-7a68-4e5e-9d4f-f9fb3da501f8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.123180] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 888.123180] env[62627]: value = "task-2194407" [ 888.123180] env[62627]: _type = "Task" [ 888.123180] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.133666] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194407, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.141749] env[62627]: DEBUG oslo_vmware.api [None req-649b49bf-f015-4632-be3e-35fc5bfe58c9 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Task: {'id': task-2194406, 'name': ReconfigVM_Task, 'duration_secs': 0.169715} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.144300] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-649b49bf-f015-4632-be3e-35fc5bfe58c9 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447660', 'volume_id': '48409ca3-f1ae-40de-ad9d-766d53425c27', 'name': 'volume-48409ca3-f1ae-40de-ad9d-766d53425c27', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a397bce6-30c7-485d-81ab-88a8e021996c', 'attached_at': '', 'detached_at': '', 'volume_id': '48409ca3-f1ae-40de-ad9d-766d53425c27', 'serial': '48409ca3-f1ae-40de-ad9d-766d53425c27'} {{(pid=62627) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 888.169092] env[62627]: DEBUG oslo_vmware.api [None req-802f837f-ac55-4bd1-9a7d-149642b4d044 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194405, 'name': RemoveSnapshot_Task, 'duration_secs': 0.567567} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.169988] env[62627]: DEBUG nova.network.neutron [req-db335205-642e-4579-b861-4dfcfe9753ea req-5df3aa70-397a-419d-a84b-341c3930f9d6 service nova] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Instance cache missing network info. 
{{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 888.174319] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-802f837f-ac55-4bd1-9a7d-149642b4d044 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: aed62160-2db5-4008-a491-2b27ae81ef5f] Deleted Snapshot of the VM instance {{(pid=62627) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 888.174319] env[62627]: INFO nova.compute.manager [None req-802f837f-ac55-4bd1-9a7d-149642b4d044 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: aed62160-2db5-4008-a491-2b27ae81ef5f] Took 18.22 seconds to snapshot the instance on the hypervisor. [ 888.272277] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07c28a32-30e7-44a2-9dc3-f44c7d720eeb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.280724] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40b3cb63-6a6c-4a97-bb33-b4f8c6c1fc06 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.317433] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bf4af98-9efc-438b-ad73-de0efb8a828f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.325390] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd51ebf1-9417-434c-93f4-33594936bca5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.340103] env[62627]: DEBUG nova.compute.provider_tree [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 888.410872] env[62627]: DEBUG nova.network.neutron [req-db335205-642e-4579-b861-4dfcfe9753ea req-5df3aa70-397a-419d-a84b-341c3930f9d6 service nova] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 888.461114] env[62627]: DEBUG nova.network.neutron [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Successfully updated port: e9aeb5b9-eeec-406e-a7d0-6207531ec459 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 888.483291] env[62627]: DEBUG oslo_vmware.api [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2194403, 'name': PowerOnVM_Task, 'duration_secs': 1.015367} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.483491] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 888.483689] env[62627]: INFO nova.compute.manager [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] Took 10.92 seconds to spawn the instance on the hypervisor. [ 888.483846] env[62627]: DEBUG nova.compute.manager [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 888.484677] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9d3ed45-66a7-445b-af44-ce76a9f53966 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.537606] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Acquiring lock "refresh_cache-f1b70379-e13d-4007-ac2d-3d2d43cd5602" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 888.537752] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Acquired lock "refresh_cache-f1b70379-e13d-4007-ac2d-3d2d43cd5602" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 888.537921] env[62627]: DEBUG nova.network.neutron [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: f1b70379-e13d-4007-ac2d-3d2d43cd5602] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 888.632817] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194407, 'name': CreateVM_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.693074] env[62627]: DEBUG nova.compute.manager [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Start spawning the instance on the hypervisor. 
{{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 888.700057] env[62627]: DEBUG oslo_concurrency.lockutils [None req-32ef16b3-e927-432e-9947-b14ab10d70c1 tempest-ServersListShow296Test-1206191576 tempest-ServersListShow296Test-1206191576-project-member] Acquiring lock "582ebeb1-e916-45f7-b569-96ae3a7acda5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 888.700341] env[62627]: DEBUG oslo_concurrency.lockutils [None req-32ef16b3-e927-432e-9947-b14ab10d70c1 tempest-ServersListShow296Test-1206191576 tempest-ServersListShow296Test-1206191576-project-member] Lock "582ebeb1-e916-45f7-b569-96ae3a7acda5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 888.718645] env[62627]: DEBUG nova.virt.hardware [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:32:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='477bc599-8618-4d09-a5b7-e98bb785a942',id=38,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1076459801',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 888.718873] env[62627]: DEBUG nova.virt.hardware [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 888.719044] env[62627]: DEBUG nova.virt.hardware [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 888.720008] env[62627]: DEBUG nova.virt.hardware [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 888.720008] env[62627]: DEBUG nova.virt.hardware [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 888.720008] env[62627]: DEBUG nova.virt.hardware [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 
tempest-MigrationsAdminTest-1384528618-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 888.720008] env[62627]: DEBUG nova.virt.hardware [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 888.721050] env[62627]: DEBUG nova.virt.hardware [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 888.721390] env[62627]: DEBUG nova.virt.hardware [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 888.721790] env[62627]: DEBUG nova.virt.hardware [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 888.722096] env[62627]: DEBUG nova.virt.hardware [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 888.723438] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d6491e2-056a-431d-8af2-a99c45477579 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.731704] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-375fdfd1-e8e6-4605-9c30-5604c6e0465c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.844912] env[62627]: DEBUG nova.scheduler.client.report [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 888.913155] env[62627]: DEBUG oslo_concurrency.lockutils [req-db335205-642e-4579-b861-4dfcfe9753ea req-5df3aa70-397a-419d-a84b-341c3930f9d6 service nova] Releasing lock "refresh_cache-574293e3-8ec8-410f-8a30-83bc60d25d64" {{(pid=62627) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 888.913446] env[62627]: DEBUG nova.compute.manager [req-db335205-642e-4579-b861-4dfcfe9753ea req-5df3aa70-397a-419d-a84b-341c3930f9d6 service nova] [instance: 02216be1-cdb9-421a-8a23-4a919b376595] Received event network-vif-plugged-40b19e0d-048f-4bca-bfea-412ad98b0568 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 888.913650] env[62627]: DEBUG oslo_concurrency.lockutils [req-db335205-642e-4579-b861-4dfcfe9753ea req-5df3aa70-397a-419d-a84b-341c3930f9d6 service nova] Acquiring lock "02216be1-cdb9-421a-8a23-4a919b376595-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 888.913833] env[62627]: DEBUG oslo_concurrency.lockutils [req-db335205-642e-4579-b861-4dfcfe9753ea req-5df3aa70-397a-419d-a84b-341c3930f9d6 service nova] Lock "02216be1-cdb9-421a-8a23-4a919b376595-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 888.913991] env[62627]: DEBUG oslo_concurrency.lockutils [req-db335205-642e-4579-b861-4dfcfe9753ea req-5df3aa70-397a-419d-a84b-341c3930f9d6 service nova] Lock "02216be1-cdb9-421a-8a23-4a919b376595-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 888.914252] env[62627]: DEBUG nova.compute.manager [req-db335205-642e-4579-b861-4dfcfe9753ea req-5df3aa70-397a-419d-a84b-341c3930f9d6 service nova] [instance: 02216be1-cdb9-421a-8a23-4a919b376595] No waiting events found dispatching network-vif-plugged-40b19e0d-048f-4bca-bfea-412ad98b0568 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 888.914440] env[62627]: WARNING nova.compute.manager [req-db335205-642e-4579-b861-4dfcfe9753ea req-5df3aa70-397a-419d-a84b-341c3930f9d6 service nova] [instance: 02216be1-cdb9-421a-8a23-4a919b376595] Received unexpected event network-vif-plugged-40b19e0d-048f-4bca-bfea-412ad98b0568 for instance with vm_state building and task_state spawning. [ 888.914600] env[62627]: DEBUG nova.compute.manager [req-db335205-642e-4579-b861-4dfcfe9753ea req-5df3aa70-397a-419d-a84b-341c3930f9d6 service nova] [instance: 02216be1-cdb9-421a-8a23-4a919b376595] Received event network-changed-40b19e0d-048f-4bca-bfea-412ad98b0568 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 888.914753] env[62627]: DEBUG nova.compute.manager [req-db335205-642e-4579-b861-4dfcfe9753ea req-5df3aa70-397a-419d-a84b-341c3930f9d6 service nova] [instance: 02216be1-cdb9-421a-8a23-4a919b376595] Refreshing instance network info cache due to event network-changed-40b19e0d-048f-4bca-bfea-412ad98b0568. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 888.914928] env[62627]: DEBUG oslo_concurrency.lockutils [req-db335205-642e-4579-b861-4dfcfe9753ea req-5df3aa70-397a-419d-a84b-341c3930f9d6 service nova] Acquiring lock "refresh_cache-02216be1-cdb9-421a-8a23-4a919b376595" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 888.915074] env[62627]: DEBUG oslo_concurrency.lockutils [req-db335205-642e-4579-b861-4dfcfe9753ea req-5df3aa70-397a-419d-a84b-341c3930f9d6 service nova] Acquired lock "refresh_cache-02216be1-cdb9-421a-8a23-4a919b376595" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 888.915278] env[62627]: DEBUG nova.network.neutron [req-db335205-642e-4579-b861-4dfcfe9753ea req-5df3aa70-397a-419d-a84b-341c3930f9d6 service nova] [instance: 02216be1-cdb9-421a-8a23-4a919b376595] Refreshing network info cache for port 40b19e0d-048f-4bca-bfea-412ad98b0568 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 889.011891] env[62627]: INFO nova.compute.manager [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] Took 55.08 seconds to build instance. [ 889.096355] env[62627]: DEBUG nova.network.neutron [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: f1b70379-e13d-4007-ac2d-3d2d43cd5602] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 889.141081] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194407, 'name': CreateVM_Task, 'duration_secs': 0.605433} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.141359] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 02216be1-cdb9-421a-8a23-4a919b376595] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 889.142166] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 889.142477] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 889.142881] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 889.146415] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5b8a1c4-7d62-4f80-a8c1-b24b3c4f567a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.152683] env[62627]: DEBUG oslo_vmware.api [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Waiting for the task: (returnval){ [ 889.152683] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]525c74c9-6912-2175-35d3-8f428a2a51b6" [ 889.152683] env[62627]: _type = "Task" [ 889.152683] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.165212] env[62627]: DEBUG oslo_vmware.api [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]525c74c9-6912-2175-35d3-8f428a2a51b6, 'name': SearchDatastore_Task, 'duration_secs': 0.010343} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.165212] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 889.165516] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 02216be1-cdb9-421a-8a23-4a919b376595] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 889.165694] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 889.165838] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 889.166103] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 889.166444] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-adb64ef2-2326-49ae-b11d-98ed5124a39a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.175130] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 889.175217] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 889.176078] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-35554fa3-52bf-4c38-afb3-040fe7911e66 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.181525] env[62627]: DEBUG oslo_vmware.api [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Waiting for the task: (returnval){ [ 889.181525] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52592e23-e7d9-a684-eba4-1367644cdcad" [ 889.181525] env[62627]: _type = "Task" [ 889.181525] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.194014] env[62627]: DEBUG oslo_vmware.api [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52592e23-e7d9-a684-eba4-1367644cdcad, 'name': SearchDatastore_Task, 'duration_secs': 0.009263} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.195492] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a74dc82f-0128-41a1-8cea-145f60da0da2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.199879] env[62627]: DEBUG oslo_vmware.api [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Waiting for the task: (returnval){ [ 889.199879] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]525ba6b0-d08c-4b78-99f6-7c6b6baf76be" [ 889.199879] env[62627]: _type = "Task" [ 889.199879] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.208329] env[62627]: DEBUG nova.objects.instance [None req-649b49bf-f015-4632-be3e-35fc5bfe58c9 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Lazy-loading 'flavor' on Instance uuid a397bce6-30c7-485d-81ab-88a8e021996c {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 889.209847] env[62627]: DEBUG oslo_vmware.api [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]525ba6b0-d08c-4b78-99f6-7c6b6baf76be, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.352625] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.689s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 889.352625] env[62627]: DEBUG nova.compute.manager [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 889.355125] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9ee09dc4-abf3-444a-b97a-c68df2d6168f tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.994s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 889.355352] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9ee09dc4-abf3-444a-b97a-c68df2d6168f tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 889.357394] env[62627]: DEBUG oslo_concurrency.lockutils [None req-14e13a18-54b9-4ee7-8a29-56a51d628a23 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.100s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 889.357583] env[62627]: DEBUG oslo_concurrency.lockutils [None req-14e13a18-54b9-4ee7-8a29-56a51d628a23 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 889.359317] env[62627]: DEBUG oslo_concurrency.lockutils [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.758s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 889.360728] env[62627]: INFO nova.compute.claims [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 889.400443] env[62627]: INFO nova.scheduler.client.report [None req-9ee09dc4-abf3-444a-b97a-c68df2d6168f tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Deleted allocations for 
instance 32a3b8bb-7c3a-4e30-8a50-37ce53e71882 [ 889.408823] env[62627]: INFO nova.scheduler.client.report [None req-14e13a18-54b9-4ee7-8a29-56a51d628a23 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Deleted allocations for instance 8af5ca34-f9c4-4b68-b0f7-7163f388b778 [ 889.513472] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c0052d87-8acd-4c6a-b3c4-830226a12253 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 116.206s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 889.651983] env[62627]: DEBUG nova.compute.manager [None req-10c167b5-6b06-4d74-93cc-cf039683ff42 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 889.652242] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdbe4cf3-d490-45e0-9dc2-d121c511317c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.674419] env[62627]: DEBUG nova.network.neutron [req-db335205-642e-4579-b861-4dfcfe9753ea req-5df3aa70-397a-419d-a84b-341c3930f9d6 service nova] [instance: 02216be1-cdb9-421a-8a23-4a919b376595] Updated VIF entry in instance network info cache for port 40b19e0d-048f-4bca-bfea-412ad98b0568. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 889.674793] env[62627]: DEBUG nova.network.neutron [req-db335205-642e-4579-b861-4dfcfe9753ea req-5df3aa70-397a-419d-a84b-341c3930f9d6 service nova] [instance: 02216be1-cdb9-421a-8a23-4a919b376595] Updating instance_info_cache with network_info: [{"id": "40b19e0d-048f-4bca-bfea-412ad98b0568", "address": "fa:16:3e:05:60:d8", "network": {"id": "529dc9a6-8843-4139-a315-ff9ae56f4d81", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1514488580-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8fe72f9f87144786adbb917cf0da1b19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1eed7865-f9d8-463e-843f-3b0b3a962a2c", "external-id": "nsx-vlan-transportzone-852", "segmentation_id": 852, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap40b19e0d-04", "ovs_interfaceid": "40b19e0d-048f-4bca-bfea-412ad98b0568", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 889.716439] env[62627]: DEBUG oslo_vmware.api [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: 
{'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]525ba6b0-d08c-4b78-99f6-7c6b6baf76be, 'name': SearchDatastore_Task, 'duration_secs': 0.008209} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.716439] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 889.716439] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 02216be1-cdb9-421a-8a23-4a919b376595/02216be1-cdb9-421a-8a23-4a919b376595.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 889.716439] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b56a1d68-fa27-4d53-ba3a-64b2dde2fcd9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.719274] env[62627]: DEBUG oslo_concurrency.lockutils [None req-649b49bf-f015-4632-be3e-35fc5bfe58c9 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Lock "a397bce6-30c7-485d-81ab-88a8e021996c" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.883s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 889.725782] env[62627]: DEBUG oslo_vmware.api [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Waiting for the task: (returnval){ [ 889.725782] env[62627]: value = "task-2194408" [ 889.725782] env[62627]: _type = "Task" [ 889.725782] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.739225] env[62627]: DEBUG oslo_vmware.api [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194408, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.774999] env[62627]: DEBUG nova.network.neutron [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: f1b70379-e13d-4007-ac2d-3d2d43cd5602] Updating instance_info_cache with network_info: [{"id": "93a8b363-e41a-4ac2-abf1-274395673a5b", "address": "fa:16:3e:f2:1c:ba", "network": {"id": "529dc9a6-8843-4139-a315-ff9ae56f4d81", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1514488580-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8fe72f9f87144786adbb917cf0da1b19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1eed7865-f9d8-463e-843f-3b0b3a962a2c", "external-id": "nsx-vlan-transportzone-852", "segmentation_id": 852, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap93a8b363-e4", "ovs_interfaceid": "93a8b363-e41a-4ac2-abf1-274395673a5b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 889.866863] env[62627]: DEBUG nova.compute.utils [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 889.871307] env[62627]: DEBUG nova.compute.manager [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] Allocating IP information in the background. 
{{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 889.871480] env[62627]: DEBUG nova.network.neutron [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 889.912275] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9ee09dc4-abf3-444a-b97a-c68df2d6168f tempest-ServerAddressesTestJSON-425948700 tempest-ServerAddressesTestJSON-425948700-project-member] Lock "32a3b8bb-7c3a-4e30-8a50-37ce53e71882" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.264s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 889.918593] env[62627]: DEBUG oslo_concurrency.lockutils [None req-14e13a18-54b9-4ee7-8a29-56a51d628a23 tempest-ImagesOneServerTestJSON-1340245844 tempest-ImagesOneServerTestJSON-1340245844-project-member] Lock "8af5ca34-f9c4-4b68-b0f7-7163f388b778" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.481s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 890.015982] env[62627]: DEBUG nova.compute.manager [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 890.034921] env[62627]: DEBUG nova.policy [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8d6a2c615f034c49836031736531061c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '01b0f3fe63f64e1183acc0fec83bb3f5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 890.165957] env[62627]: INFO nova.compute.manager [None req-10c167b5-6b06-4d74-93cc-cf039683ff42 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8] instance snapshotting [ 890.169719] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3a73a51-7902-459a-bd77-f68bc3162c0b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.177201] env[62627]: DEBUG oslo_concurrency.lockutils [req-db335205-642e-4579-b861-4dfcfe9753ea req-5df3aa70-397a-419d-a84b-341c3930f9d6 service nova] Releasing lock "refresh_cache-02216be1-cdb9-421a-8a23-4a919b376595" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 890.200522] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f9797d3-8f42-4131-887c-d24fabe3474b {{(pid=62627) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.210520] env[62627]: DEBUG nova.compute.manager [req-76f61e1a-9ad6-433c-8ca1-3727665257e6 req-b9d26a15-9a72-4f9d-88d6-892b05c70aab service nova] [instance: f1b70379-e13d-4007-ac2d-3d2d43cd5602] Received event network-vif-plugged-93a8b363-e41a-4ac2-abf1-274395673a5b {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 890.210763] env[62627]: DEBUG oslo_concurrency.lockutils [req-76f61e1a-9ad6-433c-8ca1-3727665257e6 req-b9d26a15-9a72-4f9d-88d6-892b05c70aab service nova] Acquiring lock "f1b70379-e13d-4007-ac2d-3d2d43cd5602-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 890.211048] env[62627]: DEBUG oslo_concurrency.lockutils [req-76f61e1a-9ad6-433c-8ca1-3727665257e6 req-b9d26a15-9a72-4f9d-88d6-892b05c70aab service nova] Lock "f1b70379-e13d-4007-ac2d-3d2d43cd5602-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 890.211172] env[62627]: DEBUG oslo_concurrency.lockutils [req-76f61e1a-9ad6-433c-8ca1-3727665257e6 req-b9d26a15-9a72-4f9d-88d6-892b05c70aab service nova] Lock "f1b70379-e13d-4007-ac2d-3d2d43cd5602-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 890.211346] env[62627]: DEBUG nova.compute.manager [req-76f61e1a-9ad6-433c-8ca1-3727665257e6 req-b9d26a15-9a72-4f9d-88d6-892b05c70aab service nova] [instance: f1b70379-e13d-4007-ac2d-3d2d43cd5602] No waiting events found dispatching network-vif-plugged-93a8b363-e41a-4ac2-abf1-274395673a5b {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 890.211507] env[62627]: WARNING nova.compute.manager [req-76f61e1a-9ad6-433c-8ca1-3727665257e6 req-b9d26a15-9a72-4f9d-88d6-892b05c70aab service nova] [instance: f1b70379-e13d-4007-ac2d-3d2d43cd5602] Received unexpected event network-vif-plugged-93a8b363-e41a-4ac2-abf1-274395673a5b for instance with vm_state building and task_state spawning. [ 890.211666] env[62627]: DEBUG nova.compute.manager [req-76f61e1a-9ad6-433c-8ca1-3727665257e6 req-b9d26a15-9a72-4f9d-88d6-892b05c70aab service nova] [instance: f1b70379-e13d-4007-ac2d-3d2d43cd5602] Received event network-changed-93a8b363-e41a-4ac2-abf1-274395673a5b {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 890.211953] env[62627]: DEBUG nova.compute.manager [req-76f61e1a-9ad6-433c-8ca1-3727665257e6 req-b9d26a15-9a72-4f9d-88d6-892b05c70aab service nova] [instance: f1b70379-e13d-4007-ac2d-3d2d43cd5602] Refreshing instance network info cache due to event network-changed-93a8b363-e41a-4ac2-abf1-274395673a5b. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 890.212053] env[62627]: DEBUG oslo_concurrency.lockutils [req-76f61e1a-9ad6-433c-8ca1-3727665257e6 req-b9d26a15-9a72-4f9d-88d6-892b05c70aab service nova] Acquiring lock "refresh_cache-f1b70379-e13d-4007-ac2d-3d2d43cd5602" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 890.236772] env[62627]: DEBUG oslo_vmware.api [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194408, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.278739] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Releasing lock "refresh_cache-f1b70379-e13d-4007-ac2d-3d2d43cd5602" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 890.281036] env[62627]: DEBUG nova.compute.manager [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: f1b70379-e13d-4007-ac2d-3d2d43cd5602] Instance network_info: |[{"id": "93a8b363-e41a-4ac2-abf1-274395673a5b", "address": "fa:16:3e:f2:1c:ba", "network": {"id": "529dc9a6-8843-4139-a315-ff9ae56f4d81", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1514488580-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8fe72f9f87144786adbb917cf0da1b19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1eed7865-f9d8-463e-843f-3b0b3a962a2c", "external-id": "nsx-vlan-transportzone-852", "segmentation_id": 852, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap93a8b363-e4", "ovs_interfaceid": "93a8b363-e41a-4ac2-abf1-274395673a5b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 890.281036] env[62627]: DEBUG oslo_concurrency.lockutils [req-76f61e1a-9ad6-433c-8ca1-3727665257e6 req-b9d26a15-9a72-4f9d-88d6-892b05c70aab service nova] Acquired lock "refresh_cache-f1b70379-e13d-4007-ac2d-3d2d43cd5602" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 890.281036] env[62627]: DEBUG nova.network.neutron [req-76f61e1a-9ad6-433c-8ca1-3727665257e6 req-b9d26a15-9a72-4f9d-88d6-892b05c70aab service nova] [instance: f1b70379-e13d-4007-ac2d-3d2d43cd5602] Refreshing network info cache for port 93a8b363-e41a-4ac2-abf1-274395673a5b {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 890.288287] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 
tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: f1b70379-e13d-4007-ac2d-3d2d43cd5602] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f2:1c:ba', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1eed7865-f9d8-463e-843f-3b0b3a962a2c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '93a8b363-e41a-4ac2-abf1-274395673a5b', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 890.299020] env[62627]: DEBUG oslo.service.loopingcall [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 890.299020] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f1b70379-e13d-4007-ac2d-3d2d43cd5602] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 890.299020] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-df1414b6-17ab-4437-af65-fa8268953a2d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.319043] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 890.319043] env[62627]: value = "task-2194409" [ 890.319043] env[62627]: _type = "Task" [ 890.319043] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.332034] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194409, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.372976] env[62627]: DEBUG nova.compute.manager [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] Start building block device mappings for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 890.454262] env[62627]: DEBUG nova.compute.manager [None req-8f99e995-067a-40e6-bf58-99b5ee477e13 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 890.455194] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2032e8c5-2298-4bb8-ab29-d2643840a11b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.521343] env[62627]: DEBUG nova.network.neutron [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Successfully updated port: 4a5f7c0b-f029-481c-a96f-0d9789d6bad5 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 890.549210] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 890.670036] env[62627]: DEBUG nova.network.neutron [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] Successfully created port: 993b71f3-f13a-4db1-b2b7-2da39bd4d41e {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 890.727015] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-10c167b5-6b06-4d74-93cc-cf039683ff42 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8] Creating Snapshot of the VM instance {{(pid=62627) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 890.727713] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-2b98ad53-fc8d-4b26-8af7-3bc09235d697 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.741016] env[62627]: DEBUG oslo_vmware.api [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194408, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.566151} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.745174] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 02216be1-cdb9-421a-8a23-4a919b376595/02216be1-cdb9-421a-8a23-4a919b376595.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 890.745174] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 02216be1-cdb9-421a-8a23-4a919b376595] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 890.745174] env[62627]: DEBUG oslo_vmware.api [None req-10c167b5-6b06-4d74-93cc-cf039683ff42 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Waiting for the task: (returnval){ [ 890.745174] env[62627]: value = "task-2194410" [ 890.745174] env[62627]: _type = "Task" [ 890.745174] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.746562] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d126b25e-62d2-458b-8384-8bf2c6339642 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.765277] env[62627]: DEBUG oslo_vmware.api [None req-10c167b5-6b06-4d74-93cc-cf039683ff42 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194410, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.766525] env[62627]: DEBUG oslo_vmware.api [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Waiting for the task: (returnval){ [ 890.766525] env[62627]: value = "task-2194411" [ 890.766525] env[62627]: _type = "Task" [ 890.766525] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.785772] env[62627]: DEBUG oslo_vmware.api [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194411, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.831818] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ac49ffcb-c938-4b93-b39b-4ae1ddf97fca tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Acquiring lock "a397bce6-30c7-485d-81ab-88a8e021996c" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 890.832109] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ac49ffcb-c938-4b93-b39b-4ae1ddf97fca tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Lock "a397bce6-30c7-485d-81ab-88a8e021996c" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 890.833521] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194409, 'name': CreateVM_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.949704] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36865ea7-0da8-4752-b183-ec2c4e3ace6a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.958366] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4318ad11-16a9-4826-9405-6406dd3ed8ae {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.992878] env[62627]: INFO nova.compute.manager [None req-8f99e995-067a-40e6-bf58-99b5ee477e13 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] instance snapshotting [ 890.996374] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4a4508d-15cc-4b9b-ad35-98b7651761a0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.002025] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73b1d76d-e482-4a8a-980f-19dac53c9593 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.023443] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bea65cbe-9321-4662-ac5e-710a61c29b1f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.030775] env[62627]: DEBUG oslo_concurrency.lockutils [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Acquiring lock "refresh_cache-892fc84b-02a9-4fa0-81b5-80326a060c64" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 891.030918] env[62627]: DEBUG oslo_concurrency.lockutils [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Acquired lock "refresh_cache-892fc84b-02a9-4fa0-81b5-80326a060c64" {{(pid=62627) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 891.031085] env[62627]: DEBUG nova.network.neutron [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 891.033450] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8f09ba5-12aa-43c8-9bf3-88ffd7f39587 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.048299] env[62627]: DEBUG nova.compute.provider_tree [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 891.260474] env[62627]: DEBUG oslo_vmware.api [None req-10c167b5-6b06-4d74-93cc-cf039683ff42 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194410, 'name': CreateSnapshot_Task, 'duration_secs': 0.484866} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.260743] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-10c167b5-6b06-4d74-93cc-cf039683ff42 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8] Created Snapshot of the VM instance {{(pid=62627) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 891.262241] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e08a0daf-107c-4de7-83d5-7b5a93b25444 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.285404] env[62627]: DEBUG oslo_vmware.api [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194411, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.084114} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.285679] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 02216be1-cdb9-421a-8a23-4a919b376595] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 891.286504] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de139168-d7e9-40d9-a2b1-fdfe9e506436 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.314395] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 02216be1-cdb9-421a-8a23-4a919b376595] Reconfiguring VM instance instance-0000002b to attach disk [datastore2] 02216be1-cdb9-421a-8a23-4a919b376595/02216be1-cdb9-421a-8a23-4a919b376595.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 891.315812] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8af1ebab-93d2-47cc-8cbb-30dd7dcb9f55 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.338678] env[62627]: INFO nova.compute.manager [None req-ac49ffcb-c938-4b93-b39b-4ae1ddf97fca tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Detaching volume 48409ca3-f1ae-40de-ad9d-766d53425c27 [ 891.345592] env[62627]: DEBUG oslo_vmware.api [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Waiting for the task: (returnval){ [ 891.345592] env[62627]: value = "task-2194412" [ 891.345592] env[62627]: _type = "Task" [ 891.345592] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.347091] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194409, 'name': CreateVM_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.357734] env[62627]: DEBUG oslo_vmware.api [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194412, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.387543] env[62627]: DEBUG nova.compute.manager [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] Start spawning the instance on the hypervisor. 
{{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 891.407130] env[62627]: INFO nova.virt.block_device [None req-ac49ffcb-c938-4b93-b39b-4ae1ddf97fca tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Attempting to driver detach volume 48409ca3-f1ae-40de-ad9d-766d53425c27 from mountpoint /dev/sdb [ 891.407377] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-ac49ffcb-c938-4b93-b39b-4ae1ddf97fca tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Volume detach. Driver type: vmdk {{(pid=62627) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 891.407569] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-ac49ffcb-c938-4b93-b39b-4ae1ddf97fca tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447660', 'volume_id': '48409ca3-f1ae-40de-ad9d-766d53425c27', 'name': 'volume-48409ca3-f1ae-40de-ad9d-766d53425c27', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a397bce6-30c7-485d-81ab-88a8e021996c', 'attached_at': '', 'detached_at': '', 'volume_id': '48409ca3-f1ae-40de-ad9d-766d53425c27', 'serial': '48409ca3-f1ae-40de-ad9d-766d53425c27'} {{(pid=62627) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 891.408884] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a286c89-329a-4bfc-abc7-2f6a298ae63b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.435312] env[62627]: DEBUG nova.virt.hardware [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 891.435559] env[62627]: DEBUG nova.virt.hardware [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 891.435716] env[62627]: DEBUG nova.virt.hardware [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Image limits 0:0:0 {{(pid=62627) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 891.435900] env[62627]: DEBUG nova.virt.hardware [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 891.436059] env[62627]: DEBUG nova.virt.hardware [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 891.436214] env[62627]: DEBUG nova.virt.hardware [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 891.436420] env[62627]: DEBUG nova.virt.hardware [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 891.436600] env[62627]: DEBUG nova.virt.hardware [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 891.436776] env[62627]: DEBUG nova.virt.hardware [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 891.436939] env[62627]: DEBUG nova.virt.hardware [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 891.437128] env[62627]: DEBUG nova.virt.hardware [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 891.438109] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af5b869d-2cc3-45da-9e23-0e3baff1eca3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.441304] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6eec33dd-bd00-4552-b71a-14955e7b681b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.450924] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfaed612-df0e-4f36-b6a5-8852acd829fd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.455182] env[62627]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbca410c-6d3e-4982-b966-08426316e46b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.470409] env[62627]: DEBUG nova.network.neutron [req-76f61e1a-9ad6-433c-8ca1-3727665257e6 req-b9d26a15-9a72-4f9d-88d6-892b05c70aab service nova] [instance: f1b70379-e13d-4007-ac2d-3d2d43cd5602] Updated VIF entry in instance network info cache for port 93a8b363-e41a-4ac2-abf1-274395673a5b. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 891.470804] env[62627]: DEBUG nova.network.neutron [req-76f61e1a-9ad6-433c-8ca1-3727665257e6 req-b9d26a15-9a72-4f9d-88d6-892b05c70aab service nova] [instance: f1b70379-e13d-4007-ac2d-3d2d43cd5602] Updating instance_info_cache with network_info: [{"id": "93a8b363-e41a-4ac2-abf1-274395673a5b", "address": "fa:16:3e:f2:1c:ba", "network": {"id": "529dc9a6-8843-4139-a315-ff9ae56f4d81", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1514488580-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8fe72f9f87144786adbb917cf0da1b19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1eed7865-f9d8-463e-843f-3b0b3a962a2c", "external-id": "nsx-vlan-transportzone-852", "segmentation_id": 852, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap93a8b363-e4", "ovs_interfaceid": "93a8b363-e41a-4ac2-abf1-274395673a5b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 891.493727] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8093ed04-7b5c-4876-ba92-1ec15a183b62 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.510330] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-ac49ffcb-c938-4b93-b39b-4ae1ddf97fca tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] The volume has not been displaced from its original location: [datastore2] volume-48409ca3-f1ae-40de-ad9d-766d53425c27/volume-48409ca3-f1ae-40de-ad9d-766d53425c27.vmdk. No consolidation needed. 
{{(pid=62627) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 891.516540] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-ac49ffcb-c938-4b93-b39b-4ae1ddf97fca tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Reconfiguring VM instance instance-00000010 to detach disk 2001 {{(pid=62627) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 891.517220] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8b66c898-1b25-4b04-a1db-fe4f4d3c028d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.535662] env[62627]: DEBUG oslo_vmware.api [None req-ac49ffcb-c938-4b93-b39b-4ae1ddf97fca tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Waiting for the task: (returnval){ [ 891.535662] env[62627]: value = "task-2194413" [ 891.535662] env[62627]: _type = "Task" [ 891.535662] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.546318] env[62627]: DEBUG oslo_vmware.api [None req-ac49ffcb-c938-4b93-b39b-4ae1ddf97fca tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Task: {'id': task-2194413, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.551733] env[62627]: DEBUG nova.scheduler.client.report [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 891.556948] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8f99e995-067a-40e6-bf58-99b5ee477e13 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Creating Snapshot of the VM instance {{(pid=62627) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 891.557245] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-f603db96-fabd-4864-a43b-c09fe25ce0c5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.564601] env[62627]: DEBUG oslo_vmware.api [None req-8f99e995-067a-40e6-bf58-99b5ee477e13 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Waiting for the task: (returnval){ [ 891.564601] env[62627]: value = "task-2194414" [ 891.564601] env[62627]: _type = "Task" [ 891.564601] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.574050] env[62627]: DEBUG oslo_vmware.api [None req-8f99e995-067a-40e6-bf58-99b5ee477e13 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194414, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.638552] env[62627]: DEBUG nova.network.neutron [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 891.786613] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-10c167b5-6b06-4d74-93cc-cf039683ff42 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8] Creating linked-clone VM from snapshot {{(pid=62627) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 891.786939] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-f9c93a57-4a61-4310-a966-1d32036ac2d1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.800160] env[62627]: DEBUG oslo_vmware.api [None req-10c167b5-6b06-4d74-93cc-cf039683ff42 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Waiting for the task: (returnval){ [ 891.800160] env[62627]: value = "task-2194415" [ 891.800160] env[62627]: _type = "Task" [ 891.800160] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.818023] env[62627]: DEBUG nova.compute.manager [req-660cc2a0-afaf-4b8e-bd3f-2079062db0e4 req-81b64a83-3b98-4093-b15b-7407e1ed3844 service nova] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] Received event network-changed-b0e730ac-28a5-40f0-bbe1-c4f48f060624 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 891.818023] env[62627]: DEBUG nova.compute.manager [req-660cc2a0-afaf-4b8e-bd3f-2079062db0e4 req-81b64a83-3b98-4093-b15b-7407e1ed3844 service nova] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] Refreshing instance network info cache due to event network-changed-b0e730ac-28a5-40f0-bbe1-c4f48f060624. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 891.818023] env[62627]: DEBUG oslo_concurrency.lockutils [req-660cc2a0-afaf-4b8e-bd3f-2079062db0e4 req-81b64a83-3b98-4093-b15b-7407e1ed3844 service nova] Acquiring lock "refresh_cache-6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 891.818023] env[62627]: DEBUG oslo_concurrency.lockutils [req-660cc2a0-afaf-4b8e-bd3f-2079062db0e4 req-81b64a83-3b98-4093-b15b-7407e1ed3844 service nova] Acquired lock "refresh_cache-6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 891.818023] env[62627]: DEBUG nova.network.neutron [req-660cc2a0-afaf-4b8e-bd3f-2079062db0e4 req-81b64a83-3b98-4093-b15b-7407e1ed3844 service nova] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] Refreshing network info cache for port b0e730ac-28a5-40f0-bbe1-c4f48f060624 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 891.818784] env[62627]: DEBUG oslo_vmware.api [None req-10c167b5-6b06-4d74-93cc-cf039683ff42 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194415, 'name': CloneVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.845817] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194409, 'name': CreateVM_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.857744] env[62627]: DEBUG oslo_vmware.api [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194412, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.974114] env[62627]: DEBUG oslo_concurrency.lockutils [req-76f61e1a-9ad6-433c-8ca1-3727665257e6 req-b9d26a15-9a72-4f9d-88d6-892b05c70aab service nova] Releasing lock "refresh_cache-f1b70379-e13d-4007-ac2d-3d2d43cd5602" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 891.974421] env[62627]: DEBUG nova.compute.manager [req-76f61e1a-9ad6-433c-8ca1-3727665257e6 req-b9d26a15-9a72-4f9d-88d6-892b05c70aab service nova] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Received event network-vif-plugged-e9aeb5b9-eeec-406e-a7d0-6207531ec459 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 891.974642] env[62627]: DEBUG oslo_concurrency.lockutils [req-76f61e1a-9ad6-433c-8ca1-3727665257e6 req-b9d26a15-9a72-4f9d-88d6-892b05c70aab service nova] Acquiring lock "574293e3-8ec8-410f-8a30-83bc60d25d64-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 891.974930] env[62627]: DEBUG oslo_concurrency.lockutils [req-76f61e1a-9ad6-433c-8ca1-3727665257e6 req-b9d26a15-9a72-4f9d-88d6-892b05c70aab service nova] Lock "574293e3-8ec8-410f-8a30-83bc60d25d64-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 891.975070] env[62627]: DEBUG oslo_concurrency.lockutils [req-76f61e1a-9ad6-433c-8ca1-3727665257e6 req-b9d26a15-9a72-4f9d-88d6-892b05c70aab service nova] Lock "574293e3-8ec8-410f-8a30-83bc60d25d64-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 891.975518] env[62627]: DEBUG nova.compute.manager [req-76f61e1a-9ad6-433c-8ca1-3727665257e6 req-b9d26a15-9a72-4f9d-88d6-892b05c70aab service nova] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] No waiting events found dispatching network-vif-plugged-e9aeb5b9-eeec-406e-a7d0-6207531ec459 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 891.975518] env[62627]: WARNING nova.compute.manager [req-76f61e1a-9ad6-433c-8ca1-3727665257e6 req-b9d26a15-9a72-4f9d-88d6-892b05c70aab service nova] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Received unexpected event network-vif-plugged-e9aeb5b9-eeec-406e-a7d0-6207531ec459 for instance with vm_state building and task_state spawning. [ 891.975588] env[62627]: DEBUG nova.compute.manager [req-76f61e1a-9ad6-433c-8ca1-3727665257e6 req-b9d26a15-9a72-4f9d-88d6-892b05c70aab service nova] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Received event network-changed-e9aeb5b9-eeec-406e-a7d0-6207531ec459 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 891.975740] env[62627]: DEBUG nova.compute.manager [req-76f61e1a-9ad6-433c-8ca1-3727665257e6 req-b9d26a15-9a72-4f9d-88d6-892b05c70aab service nova] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Refreshing instance network info cache due to event network-changed-e9aeb5b9-eeec-406e-a7d0-6207531ec459. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 891.975925] env[62627]: DEBUG oslo_concurrency.lockutils [req-76f61e1a-9ad6-433c-8ca1-3727665257e6 req-b9d26a15-9a72-4f9d-88d6-892b05c70aab service nova] Acquiring lock "refresh_cache-574293e3-8ec8-410f-8a30-83bc60d25d64" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 891.976071] env[62627]: DEBUG oslo_concurrency.lockutils [req-76f61e1a-9ad6-433c-8ca1-3727665257e6 req-b9d26a15-9a72-4f9d-88d6-892b05c70aab service nova] Acquired lock "refresh_cache-574293e3-8ec8-410f-8a30-83bc60d25d64" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 891.976237] env[62627]: DEBUG nova.network.neutron [req-76f61e1a-9ad6-433c-8ca1-3727665257e6 req-b9d26a15-9a72-4f9d-88d6-892b05c70aab service nova] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Refreshing network info cache for port e9aeb5b9-eeec-406e-a7d0-6207531ec459 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 892.049924] env[62627]: DEBUG oslo_vmware.api [None req-ac49ffcb-c938-4b93-b39b-4ae1ddf97fca tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Task: {'id': task-2194413, 'name': ReconfigVM_Task, 'duration_secs': 0.420073} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.050435] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-ac49ffcb-c938-4b93-b39b-4ae1ddf97fca tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Reconfigured VM instance instance-00000010 to detach disk 2001 {{(pid=62627) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 892.059751] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1bdc4d54-af5c-49fa-b573-35427eef3f4b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.068642] env[62627]: DEBUG oslo_concurrency.lockutils [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.709s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 892.069185] env[62627]: DEBUG nova.compute.manager [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218] Start building networks asynchronously for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 892.072080] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.173s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 892.073493] env[62627]: INFO nova.compute.claims [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] [instance: c9088ac9-456b-4c92-8773-79c263e7cc2a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 892.085437] env[62627]: DEBUG oslo_vmware.api [None req-ac49ffcb-c938-4b93-b39b-4ae1ddf97fca tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Waiting for the task: (returnval){ [ 892.085437] env[62627]: value = "task-2194416" [ 892.085437] env[62627]: _type = "Task" [ 892.085437] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.089300] env[62627]: DEBUG oslo_vmware.api [None req-8f99e995-067a-40e6-bf58-99b5ee477e13 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194414, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.108739] env[62627]: DEBUG oslo_vmware.api [None req-ac49ffcb-c938-4b93-b39b-4ae1ddf97fca tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Task: {'id': task-2194416, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.201370] env[62627]: DEBUG nova.network.neutron [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Updating instance_info_cache with network_info: [{"id": "4a5f7c0b-f029-481c-a96f-0d9789d6bad5", "address": "fa:16:3e:6b:ef:0c", "network": {"id": "bd876f46-2bdf-4196-b718-c36f7e15f150", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.227", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ccb0cafa1a904d578ec4998b386bebf7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae1d6a8-cbba-4bbf-af10-ba5467340475", "external-id": "nsx-vlan-transportzone-271", "segmentation_id": 271, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4a5f7c0b-f0", "ovs_interfaceid": "4a5f7c0b-f029-481c-a96f-0d9789d6bad5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 892.252141] env[62627]: DEBUG nova.network.neutron [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Successfully updated port: e8a543dc-e58e-4c74-a763-b795643a2d53 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 892.310605] env[62627]: DEBUG oslo_vmware.api [None req-10c167b5-6b06-4d74-93cc-cf039683ff42 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194415, 'name': CloneVM_Task} progress is 94%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.343477] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194409, 'name': CreateVM_Task, 'duration_secs': 1.583072} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.343732] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f1b70379-e13d-4007-ac2d-3d2d43cd5602] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 892.344359] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 892.344524] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 892.344898] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 892.345111] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-87f12e2a-7447-4ba6-8273-6c7c26a9e093 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.353018] env[62627]: DEBUG oslo_vmware.api [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Waiting for the task: (returnval){ [ 892.353018] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52fe9cd5-e5c7-da98-709f-4123a83d2e80" [ 892.353018] env[62627]: _type = "Task" [ 892.353018] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.356765] env[62627]: DEBUG oslo_vmware.api [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194412, 'name': ReconfigVM_Task, 'duration_secs': 0.828228} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.359815] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 02216be1-cdb9-421a-8a23-4a919b376595] Reconfigured VM instance instance-0000002b to attach disk [datastore2] 02216be1-cdb9-421a-8a23-4a919b376595/02216be1-cdb9-421a-8a23-4a919b376595.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 892.361246] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5a64229d-c9b4-4a1b-87e2-98f30ea06b18 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.367249] env[62627]: DEBUG oslo_vmware.api [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52fe9cd5-e5c7-da98-709f-4123a83d2e80, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.368535] env[62627]: DEBUG oslo_vmware.api [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Waiting for the task: (returnval){ [ 892.368535] env[62627]: value = "task-2194417" [ 892.368535] env[62627]: _type = "Task" [ 892.368535] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.380555] env[62627]: DEBUG oslo_vmware.api [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194417, 'name': Rename_Task} progress is 6%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.449946] env[62627]: DEBUG nova.compute.manager [req-3f798eff-c254-462a-96c4-62f943301d9f req-2ac01e17-aef1-4c03-ae25-c19d89169d38 service nova] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Received event network-vif-plugged-4a5f7c0b-f029-481c-a96f-0d9789d6bad5 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 892.450242] env[62627]: DEBUG oslo_concurrency.lockutils [req-3f798eff-c254-462a-96c4-62f943301d9f req-2ac01e17-aef1-4c03-ae25-c19d89169d38 service nova] Acquiring lock "892fc84b-02a9-4fa0-81b5-80326a060c64-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 892.450590] env[62627]: DEBUG oslo_concurrency.lockutils [req-3f798eff-c254-462a-96c4-62f943301d9f req-2ac01e17-aef1-4c03-ae25-c19d89169d38 service nova] Lock "892fc84b-02a9-4fa0-81b5-80326a060c64-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 892.450684] env[62627]: DEBUG oslo_concurrency.lockutils [req-3f798eff-c254-462a-96c4-62f943301d9f req-2ac01e17-aef1-4c03-ae25-c19d89169d38 service nova] Lock "892fc84b-02a9-4fa0-81b5-80326a060c64-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 892.450934] env[62627]: DEBUG nova.compute.manager [req-3f798eff-c254-462a-96c4-62f943301d9f req-2ac01e17-aef1-4c03-ae25-c19d89169d38 service nova] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] No waiting events found dispatching network-vif-plugged-4a5f7c0b-f029-481c-a96f-0d9789d6bad5 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 892.451112] env[62627]: WARNING nova.compute.manager [req-3f798eff-c254-462a-96c4-62f943301d9f req-2ac01e17-aef1-4c03-ae25-c19d89169d38 service nova] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Received unexpected event network-vif-plugged-4a5f7c0b-f029-481c-a96f-0d9789d6bad5 for instance with vm_state building and task_state spawning. [ 892.451317] env[62627]: DEBUG nova.compute.manager [req-3f798eff-c254-462a-96c4-62f943301d9f req-2ac01e17-aef1-4c03-ae25-c19d89169d38 service nova] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Received event network-changed-4a5f7c0b-f029-481c-a96f-0d9789d6bad5 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 892.451661] env[62627]: DEBUG nova.compute.manager [req-3f798eff-c254-462a-96c4-62f943301d9f req-2ac01e17-aef1-4c03-ae25-c19d89169d38 service nova] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Refreshing instance network info cache due to event network-changed-4a5f7c0b-f029-481c-a96f-0d9789d6bad5. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 892.451661] env[62627]: DEBUG oslo_concurrency.lockutils [req-3f798eff-c254-462a-96c4-62f943301d9f req-2ac01e17-aef1-4c03-ae25-c19d89169d38 service nova] Acquiring lock "refresh_cache-892fc84b-02a9-4fa0-81b5-80326a060c64" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 892.583684] env[62627]: DEBUG nova.compute.utils [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 892.586206] env[62627]: DEBUG oslo_vmware.api [None req-8f99e995-067a-40e6-bf58-99b5ee477e13 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194414, 'name': CreateSnapshot_Task, 'duration_secs': 0.744682} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.588424] env[62627]: DEBUG nova.compute.manager [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218] Allocating IP information in the background. {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 892.588778] env[62627]: DEBUG nova.network.neutron [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 892.590904] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8f99e995-067a-40e6-bf58-99b5ee477e13 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Created Snapshot of the VM instance {{(pid=62627) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 892.592470] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78636303-290c-4766-bc03-a76bf06f7004 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.612026] env[62627]: DEBUG oslo_vmware.api [None req-ac49ffcb-c938-4b93-b39b-4ae1ddf97fca tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Task: {'id': task-2194416, 'name': ReconfigVM_Task, 'duration_secs': 0.159177} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.615182] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-ac49ffcb-c938-4b93-b39b-4ae1ddf97fca tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447660', 'volume_id': '48409ca3-f1ae-40de-ad9d-766d53425c27', 'name': 'volume-48409ca3-f1ae-40de-ad9d-766d53425c27', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a397bce6-30c7-485d-81ab-88a8e021996c', 'attached_at': '', 'detached_at': '', 'volume_id': '48409ca3-f1ae-40de-ad9d-766d53425c27', 'serial': '48409ca3-f1ae-40de-ad9d-766d53425c27'} {{(pid=62627) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 892.683827] env[62627]: DEBUG nova.policy [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c72c1ac25b9f4aeea2c5fb37afd37598', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '14238d8125674b63a7cf4452cb260283', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 892.701908] env[62627]: DEBUG nova.network.neutron [req-76f61e1a-9ad6-433c-8ca1-3727665257e6 req-b9d26a15-9a72-4f9d-88d6-892b05c70aab service nova] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Instance cache missing network info. 
{{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 892.704391] env[62627]: DEBUG oslo_concurrency.lockutils [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Releasing lock "refresh_cache-892fc84b-02a9-4fa0-81b5-80326a060c64" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 892.704391] env[62627]: DEBUG nova.compute.manager [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Instance network_info: |[{"id": "4a5f7c0b-f029-481c-a96f-0d9789d6bad5", "address": "fa:16:3e:6b:ef:0c", "network": {"id": "bd876f46-2bdf-4196-b718-c36f7e15f150", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.227", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ccb0cafa1a904d578ec4998b386bebf7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae1d6a8-cbba-4bbf-af10-ba5467340475", "external-id": "nsx-vlan-transportzone-271", "segmentation_id": 271, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4a5f7c0b-f0", "ovs_interfaceid": "4a5f7c0b-f029-481c-a96f-0d9789d6bad5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 892.704643] env[62627]: DEBUG oslo_concurrency.lockutils [req-3f798eff-c254-462a-96c4-62f943301d9f req-2ac01e17-aef1-4c03-ae25-c19d89169d38 service nova] Acquired lock "refresh_cache-892fc84b-02a9-4fa0-81b5-80326a060c64" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 892.704818] env[62627]: DEBUG nova.network.neutron [req-3f798eff-c254-462a-96c4-62f943301d9f req-2ac01e17-aef1-4c03-ae25-c19d89169d38 service nova] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Refreshing network info cache for port 4a5f7c0b-f029-481c-a96f-0d9789d6bad5 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 892.707938] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6b:ef:0c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cae1d6a8-cbba-4bbf-af10-ba5467340475', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4a5f7c0b-f029-481c-a96f-0d9789d6bad5', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 892.715412] env[62627]: DEBUG oslo.service.loopingcall [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to 
return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 892.718837] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 892.719624] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b7c7df03-f8d7-414a-9e53-0dbff9b2d615 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.743180] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 892.743180] env[62627]: value = "task-2194418" [ 892.743180] env[62627]: _type = "Task" [ 892.743180] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.756274] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Acquiring lock "refresh_cache-574293e3-8ec8-410f-8a30-83bc60d25d64" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 892.756274] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194418, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.812136] env[62627]: DEBUG oslo_vmware.api [None req-10c167b5-6b06-4d74-93cc-cf039683ff42 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194415, 'name': CloneVM_Task} progress is 95%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.869771] env[62627]: DEBUG oslo_vmware.api [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52fe9cd5-e5c7-da98-709f-4123a83d2e80, 'name': SearchDatastore_Task, 'duration_secs': 0.018818} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.873300] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 892.873569] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: f1b70379-e13d-4007-ac2d-3d2d43cd5602] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 892.873827] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 892.874010] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 892.874241] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 892.876869] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ed9fad30-0784-4d4e-8726-43f0c9c2aafa {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.884949] env[62627]: DEBUG oslo_vmware.api [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194417, 'name': Rename_Task, 'duration_secs': 0.146744} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.886187] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 02216be1-cdb9-421a-8a23-4a919b376595] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 892.887020] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 892.887020] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 892.887683] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-53c4f924-e1e2-4821-ad92-8cad3b3ac421 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.889141] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-87cd25a4-c0a7-41f5-949a-c8a702b1f1e3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.895576] env[62627]: DEBUG oslo_vmware.api [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Waiting for the task: (returnval){ [ 892.895576] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5271e23f-4839-16db-065d-89cbe0ba3773" [ 892.895576] env[62627]: _type = "Task" [ 892.895576] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.897104] env[62627]: DEBUG oslo_vmware.api [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Waiting for the task: (returnval){ [ 892.897104] env[62627]: value = "task-2194419" [ 892.897104] env[62627]: _type = "Task" [ 892.897104] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.912069] env[62627]: DEBUG oslo_vmware.api [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5271e23f-4839-16db-065d-89cbe0ba3773, 'name': SearchDatastore_Task, 'duration_secs': 0.01058} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.916612] env[62627]: DEBUG oslo_vmware.api [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194419, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.916868] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe374f16-88de-481e-9a37-a285a1a7855b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.922877] env[62627]: DEBUG oslo_vmware.api [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Waiting for the task: (returnval){ [ 892.922877] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52f939cc-7ab2-1f1d-5e0a-eeb912974a3b" [ 892.922877] env[62627]: _type = "Task" [ 892.922877] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.931980] env[62627]: DEBUG oslo_vmware.api [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52f939cc-7ab2-1f1d-5e0a-eeb912974a3b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.087503] env[62627]: DEBUG nova.compute.manager [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 893.119771] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8f99e995-067a-40e6-bf58-99b5ee477e13 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Creating linked-clone VM from snapshot {{(pid=62627) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 893.123147] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-0788d6c0-4654-4885-8ade-2ad84d4ed747 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.133863] env[62627]: DEBUG oslo_vmware.api [None req-8f99e995-067a-40e6-bf58-99b5ee477e13 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Waiting for the task: (returnval){ [ 893.133863] env[62627]: value = "task-2194420" [ 893.133863] env[62627]: _type = "Task" [ 893.133863] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.155717] env[62627]: DEBUG oslo_vmware.api [None req-8f99e995-067a-40e6-bf58-99b5ee477e13 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194420, 'name': CloneVM_Task} progress is 5%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.162723] env[62627]: DEBUG nova.network.neutron [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218] Successfully created port: dca7e38e-83c7-4679-824f-f98d9fc325ad {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 893.175981] env[62627]: DEBUG nova.objects.instance [None req-ac49ffcb-c938-4b93-b39b-4ae1ddf97fca tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Lazy-loading 'flavor' on Instance uuid a397bce6-30c7-485d-81ab-88a8e021996c {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 893.253930] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194418, 'name': CreateVM_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.299266] env[62627]: DEBUG nova.network.neutron [req-76f61e1a-9ad6-433c-8ca1-3727665257e6 req-b9d26a15-9a72-4f9d-88d6-892b05c70aab service nova] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 893.318557] env[62627]: DEBUG oslo_vmware.api [None req-10c167b5-6b06-4d74-93cc-cf039683ff42 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194415, 'name': CloneVM_Task, 'duration_secs': 1.17391} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.319890] env[62627]: INFO nova.virt.vmwareapi.vmops [None req-10c167b5-6b06-4d74-93cc-cf039683ff42 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8] Created linked-clone VM from snapshot [ 893.320918] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-210781fc-45d2-4c3f-98f6-0534b85cbc3c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.336299] env[62627]: DEBUG nova.virt.vmwareapi.images [None req-10c167b5-6b06-4d74-93cc-cf039683ff42 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8] Uploading image 0bd31f80-af13-4889-9031-2891b7400c89 {{(pid=62627) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 893.362386] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-10c167b5-6b06-4d74-93cc-cf039683ff42 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8] Destroying the VM {{(pid=62627) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 893.362386] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-654396ff-be03-47e1-9e02-9cde5e5b782f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.380052] env[62627]: DEBUG oslo_vmware.api [None req-10c167b5-6b06-4d74-93cc-cf039683ff42 
tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Waiting for the task: (returnval){ [ 893.380052] env[62627]: value = "task-2194421" [ 893.380052] env[62627]: _type = "Task" [ 893.380052] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.398056] env[62627]: DEBUG oslo_vmware.api [None req-10c167b5-6b06-4d74-93cc-cf039683ff42 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194421, 'name': Destroy_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.419626] env[62627]: DEBUG oslo_vmware.api [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194419, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.437684] env[62627]: DEBUG oslo_vmware.api [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52f939cc-7ab2-1f1d-5e0a-eeb912974a3b, 'name': SearchDatastore_Task, 'duration_secs': 0.012356} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.441661] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 893.442111] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore1] f1b70379-e13d-4007-ac2d-3d2d43cd5602/f1b70379-e13d-4007-ac2d-3d2d43cd5602.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 893.443210] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-619df745-457a-492b-aac5-e5bec99220e4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.455349] env[62627]: DEBUG oslo_vmware.api [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Waiting for the task: (returnval){ [ 893.455349] env[62627]: value = "task-2194422" [ 893.455349] env[62627]: _type = "Task" [ 893.455349] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.468579] env[62627]: DEBUG oslo_vmware.api [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194422, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.649986] env[62627]: DEBUG oslo_vmware.api [None req-8f99e995-067a-40e6-bf58-99b5ee477e13 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194420, 'name': CloneVM_Task} progress is 94%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.719308] env[62627]: DEBUG nova.network.neutron [req-660cc2a0-afaf-4b8e-bd3f-2079062db0e4 req-81b64a83-3b98-4093-b15b-7407e1ed3844 service nova] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] Updated VIF entry in instance network info cache for port b0e730ac-28a5-40f0-bbe1-c4f48f060624. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 893.719835] env[62627]: DEBUG nova.network.neutron [req-660cc2a0-afaf-4b8e-bd3f-2079062db0e4 req-81b64a83-3b98-4093-b15b-7407e1ed3844 service nova] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] Updating instance_info_cache with network_info: [{"id": "b0e730ac-28a5-40f0-bbe1-c4f48f060624", "address": "fa:16:3e:66:a3:d6", "network": {"id": "1e37275e-31eb-4a79-8ff1-1b105d16fe3d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-59116622-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.147", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7a97f57bdf4a46db80460b1d5c9f9b9f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f499bc9-78da-46c1-9274-19edf26d31cb", "external-id": "nsx-vlan-transportzone-243", "segmentation_id": 243, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0e730ac-28", "ovs_interfaceid": "b0e730ac-28a5-40f0-bbe1-c4f48f060624", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 893.726578] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-558dbc87-04f1-4c34-921f-52b9ebd176b1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.735483] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd7fd256-ca0d-4afa-8c24-60da1a85d386 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.774249] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-467561f2-8300-494a-98ec-18aafd8ddf2e {{(pid=62627) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.794316] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194418, 'name': CreateVM_Task, 'duration_secs': 0.5376} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.794647] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 893.796098] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b79d50d9-188b-4dc1-a9a4-3ad968a23033 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.802019] env[62627]: DEBUG nova.network.neutron [req-3f798eff-c254-462a-96c4-62f943301d9f req-2ac01e17-aef1-4c03-ae25-c19d89169d38 service nova] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Updated VIF entry in instance network info cache for port 4a5f7c0b-f029-481c-a96f-0d9789d6bad5. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 893.802439] env[62627]: DEBUG nova.network.neutron [req-3f798eff-c254-462a-96c4-62f943301d9f req-2ac01e17-aef1-4c03-ae25-c19d89169d38 service nova] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Updating instance_info_cache with network_info: [{"id": "4a5f7c0b-f029-481c-a96f-0d9789d6bad5", "address": "fa:16:3e:6b:ef:0c", "network": {"id": "bd876f46-2bdf-4196-b718-c36f7e15f150", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.227", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ccb0cafa1a904d578ec4998b386bebf7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae1d6a8-cbba-4bbf-af10-ba5467340475", "external-id": "nsx-vlan-transportzone-271", "segmentation_id": 271, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4a5f7c0b-f0", "ovs_interfaceid": "4a5f7c0b-f029-481c-a96f-0d9789d6bad5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 893.804315] env[62627]: DEBUG oslo_concurrency.lockutils [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 893.804501] env[62627]: DEBUG oslo_concurrency.lockutils [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 893.804870] env[62627]: DEBUG oslo_concurrency.lockutils [None 
req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 893.805209] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cb5b78c2-f044-4f84-8a80-99a694593210 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.808968] env[62627]: DEBUG oslo_concurrency.lockutils [req-76f61e1a-9ad6-433c-8ca1-3727665257e6 req-b9d26a15-9a72-4f9d-88d6-892b05c70aab service nova] Releasing lock "refresh_cache-574293e3-8ec8-410f-8a30-83bc60d25d64" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 893.810575] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Acquired lock "refresh_cache-574293e3-8ec8-410f-8a30-83bc60d25d64" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 893.810575] env[62627]: DEBUG nova.network.neutron [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 893.824619] env[62627]: DEBUG nova.compute.provider_tree [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 893.830711] env[62627]: DEBUG oslo_vmware.api [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Waiting for the task: (returnval){ [ 893.830711] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]522eeb32-e268-2114-5eb6-ebe0c8f80b82" [ 893.830711] env[62627]: _type = "Task" [ 893.830711] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.843425] env[62627]: DEBUG oslo_vmware.api [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]522eeb32-e268-2114-5eb6-ebe0c8f80b82, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.899458] env[62627]: DEBUG oslo_vmware.api [None req-10c167b5-6b06-4d74-93cc-cf039683ff42 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194421, 'name': Destroy_Task} progress is 33%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.900942] env[62627]: DEBUG nova.network.neutron [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 893.913560] env[62627]: DEBUG oslo_vmware.api [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194419, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.967925] env[62627]: DEBUG oslo_vmware.api [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194422, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.058295] env[62627]: DEBUG nova.network.neutron [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] Successfully updated port: 993b71f3-f13a-4db1-b2b7-2da39bd4d41e {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 894.060190] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Acquiring lock "c560b907-500b-42fd-851b-61a04472b660" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 894.060430] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Lock "c560b907-500b-42fd-851b-61a04472b660" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 894.101172] env[62627]: DEBUG nova.compute.manager [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218] Start spawning the instance on the hypervisor. 
{{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 894.133368] env[62627]: DEBUG nova.virt.hardware [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 894.133619] env[62627]: DEBUG nova.virt.hardware [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 894.133774] env[62627]: DEBUG nova.virt.hardware [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 894.133951] env[62627]: DEBUG nova.virt.hardware [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 894.134437] env[62627]: DEBUG nova.virt.hardware [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 894.134638] env[62627]: DEBUG nova.virt.hardware [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 894.134852] env[62627]: DEBUG nova.virt.hardware [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 894.135026] env[62627]: DEBUG nova.virt.hardware [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 894.135207] env[62627]: DEBUG nova.virt.hardware [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 
tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 894.135445] env[62627]: DEBUG nova.virt.hardware [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 894.135623] env[62627]: DEBUG nova.virt.hardware [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 894.136779] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48780d6f-4842-44a4-8d74-54071e10880c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.154072] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5676957-b6e5-4efc-ba6c-045e74fd9f41 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.159316] env[62627]: DEBUG oslo_vmware.api [None req-8f99e995-067a-40e6-bf58-99b5ee477e13 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194420, 'name': CloneVM_Task} progress is 94%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.188421] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ac49ffcb-c938-4b93-b39b-4ae1ddf97fca tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Lock "a397bce6-30c7-485d-81ab-88a8e021996c" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.356s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 894.225327] env[62627]: DEBUG oslo_concurrency.lockutils [req-660cc2a0-afaf-4b8e-bd3f-2079062db0e4 req-81b64a83-3b98-4093-b15b-7407e1ed3844 service nova] Releasing lock "refresh_cache-6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 894.309616] env[62627]: DEBUG oslo_concurrency.lockutils [req-3f798eff-c254-462a-96c4-62f943301d9f req-2ac01e17-aef1-4c03-ae25-c19d89169d38 service nova] Releasing lock "refresh_cache-892fc84b-02a9-4fa0-81b5-80326a060c64" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 894.310529] env[62627]: DEBUG nova.compute.manager [req-3f798eff-c254-462a-96c4-62f943301d9f req-2ac01e17-aef1-4c03-ae25-c19d89169d38 service nova] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Received event network-vif-plugged-e8a543dc-e58e-4c74-a763-b795643a2d53 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 894.310529] env[62627]: DEBUG oslo_concurrency.lockutils [req-3f798eff-c254-462a-96c4-62f943301d9f req-2ac01e17-aef1-4c03-ae25-c19d89169d38 service nova] Acquiring lock "574293e3-8ec8-410f-8a30-83bc60d25d64-events" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 894.310529] env[62627]: DEBUG oslo_concurrency.lockutils [req-3f798eff-c254-462a-96c4-62f943301d9f req-2ac01e17-aef1-4c03-ae25-c19d89169d38 service nova] Lock "574293e3-8ec8-410f-8a30-83bc60d25d64-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 894.310529] env[62627]: DEBUG oslo_concurrency.lockutils [req-3f798eff-c254-462a-96c4-62f943301d9f req-2ac01e17-aef1-4c03-ae25-c19d89169d38 service nova] Lock "574293e3-8ec8-410f-8a30-83bc60d25d64-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 894.312299] env[62627]: DEBUG nova.compute.manager [req-3f798eff-c254-462a-96c4-62f943301d9f req-2ac01e17-aef1-4c03-ae25-c19d89169d38 service nova] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] No waiting events found dispatching network-vif-plugged-e8a543dc-e58e-4c74-a763-b795643a2d53 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 894.312299] env[62627]: WARNING nova.compute.manager [req-3f798eff-c254-462a-96c4-62f943301d9f req-2ac01e17-aef1-4c03-ae25-c19d89169d38 service nova] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Received unexpected event network-vif-plugged-e8a543dc-e58e-4c74-a763-b795643a2d53 for instance with vm_state building and task_state spawning. [ 894.333146] env[62627]: DEBUG nova.scheduler.client.report [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 894.349055] env[62627]: DEBUG oslo_vmware.api [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]522eeb32-e268-2114-5eb6-ebe0c8f80b82, 'name': SearchDatastore_Task, 'duration_secs': 0.072613} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.349055] env[62627]: DEBUG oslo_concurrency.lockutils [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 894.349055] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 894.349055] env[62627]: DEBUG oslo_concurrency.lockutils [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 894.349055] env[62627]: DEBUG oslo_concurrency.lockutils [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 894.349055] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 894.349055] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-760c4312-89b5-4d9e-b0cf-49d975784ca5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.357622] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 894.358106] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 894.359937] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-85e64eed-a013-4456-a08e-fd49c04fa83a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.373431] env[62627]: DEBUG oslo_vmware.api [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Waiting for the task: (returnval){ [ 894.373431] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52dd42ca-62bb-39b4-18c6-ed06f1fe5b98" [ 894.373431] env[62627]: _type = "Task" [ 894.373431] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.382192] env[62627]: DEBUG oslo_vmware.api [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52dd42ca-62bb-39b4-18c6-ed06f1fe5b98, 'name': SearchDatastore_Task, 'duration_secs': 0.00958} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.386667] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-102ce8cc-9045-4308-b4be-c9924d3e4c6d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.397454] env[62627]: DEBUG oslo_vmware.api [None req-10c167b5-6b06-4d74-93cc-cf039683ff42 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194421, 'name': Destroy_Task, 'duration_secs': 0.739569} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.401358] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-10c167b5-6b06-4d74-93cc-cf039683ff42 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8] Destroyed the VM [ 894.401798] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-10c167b5-6b06-4d74-93cc-cf039683ff42 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8] Deleting Snapshot of the VM instance {{(pid=62627) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 894.402469] env[62627]: DEBUG oslo_vmware.api [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Waiting for the task: (returnval){ [ 894.402469] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52dd0cfa-f2c0-1be0-fa09-db98962d2cf2" [ 894.402469] env[62627]: _type = "Task" [ 894.402469] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.402693] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-530a76a1-810b-404b-9bb6-8c3ce48f15c0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.418191] env[62627]: DEBUG oslo_vmware.api [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194419, 'name': PowerOnVM_Task, 'duration_secs': 1.135754} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.422743] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 02216be1-cdb9-421a-8a23-4a919b376595] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 894.423019] env[62627]: INFO nova.compute.manager [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 02216be1-cdb9-421a-8a23-4a919b376595] Took 11.25 seconds to spawn the instance on the hypervisor. [ 894.423248] env[62627]: DEBUG nova.compute.manager [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 02216be1-cdb9-421a-8a23-4a919b376595] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 894.423591] env[62627]: DEBUG oslo_vmware.api [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52dd0cfa-f2c0-1be0-fa09-db98962d2cf2, 'name': SearchDatastore_Task, 'duration_secs': 0.013094} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.424031] env[62627]: DEBUG oslo_vmware.api [None req-10c167b5-6b06-4d74-93cc-cf039683ff42 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Waiting for the task: (returnval){ [ 894.424031] env[62627]: value = "task-2194423" [ 894.424031] env[62627]: _type = "Task" [ 894.424031] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.424635] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-374e50a9-151b-4c1c-b7be-3311144d5bd8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.427366] env[62627]: DEBUG oslo_concurrency.lockutils [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 894.427660] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore1] 892fc84b-02a9-4fa0-81b5-80326a060c64/892fc84b-02a9-4fa0-81b5-80326a060c64.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 894.427976] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2ace80ce-cf41-4deb-8c3f-45d02a12b599 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.447370] env[62627]: DEBUG oslo_vmware.api [None req-10c167b5-6b06-4d74-93cc-cf039683ff42 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194423, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.447694] env[62627]: DEBUG oslo_vmware.api [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Waiting for the task: (returnval){ [ 894.447694] env[62627]: value = "task-2194424" [ 894.447694] env[62627]: _type = "Task" [ 894.447694] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.456448] env[62627]: DEBUG oslo_vmware.api [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2194424, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.466249] env[62627]: DEBUG oslo_vmware.api [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194422, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.579487} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.466249] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore1] f1b70379-e13d-4007-ac2d-3d2d43cd5602/f1b70379-e13d-4007-ac2d-3d2d43cd5602.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 894.466382] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: f1b70379-e13d-4007-ac2d-3d2d43cd5602] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 894.467231] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ffe909df-5a2b-4744-9329-f0e373107f62 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.477153] env[62627]: DEBUG oslo_vmware.api [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Waiting for the task: (returnval){ [ 894.477153] env[62627]: value = "task-2194425" [ 894.477153] env[62627]: _type = "Task" [ 894.477153] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.497288] env[62627]: DEBUG oslo_vmware.api [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194425, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.562534] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Acquiring lock "refresh_cache-e76bdb57-4a30-4222-ad4c-d95e4e6193cb" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 894.562722] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Acquired lock "refresh_cache-e76bdb57-4a30-4222-ad4c-d95e4e6193cb" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 894.562904] env[62627]: DEBUG nova.network.neutron [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 894.601283] env[62627]: DEBUG nova.network.neutron [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Updating instance_info_cache with network_info: [{"id": "6bf6cde4-6fe7-4f05-a8ed-b57e621b565b", "address": "fa:16:3e:65:58:4e", "network": {"id": "a2b42bd2-d4d5-4d9a-9f84-0a6c9158114f", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-988988055", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.140", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecf9c58481c14634b2315248118ed1b6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ecc4615-18f0-4324-8e16-5e5d513325e2", "external-id": "nsx-vlan-transportzone-167", "segmentation_id": 167, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6bf6cde4-6f", "ovs_interfaceid": "6bf6cde4-6fe7-4f05-a8ed-b57e621b565b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e9aeb5b9-eeec-406e-a7d0-6207531ec459", "address": "fa:16:3e:1c:d1:bb", "network": {"id": "3f097461-afdd-4fac-a30a-555f765c38e6", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1093830906", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.241", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "ecf9c58481c14634b2315248118ed1b6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb18870e-f482-4c7b-8cd4-5c933d3ad294", "external-id": "nsx-vlan-transportzone-76", "segmentation_id": 76, "bound_drivers": {"0": 
"nsxv3"}}, "devname": "tape9aeb5b9-ee", "ovs_interfaceid": "e9aeb5b9-eeec-406e-a7d0-6207531ec459", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e8a543dc-e58e-4c74-a763-b795643a2d53", "address": "fa:16:3e:1c:4c:8b", "network": {"id": "a2b42bd2-d4d5-4d9a-9f84-0a6c9158114f", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-988988055", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecf9c58481c14634b2315248118ed1b6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ecc4615-18f0-4324-8e16-5e5d513325e2", "external-id": "nsx-vlan-transportzone-167", "segmentation_id": 167, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8a543dc-e5", "ovs_interfaceid": "e8a543dc-e58e-4c74-a763-b795643a2d53", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 894.646839] env[62627]: DEBUG oslo_vmware.api [None req-8f99e995-067a-40e6-bf58-99b5ee477e13 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194420, 'name': CloneVM_Task, 'duration_secs': 1.411898} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.647791] env[62627]: INFO nova.virt.vmwareapi.vmops [None req-8f99e995-067a-40e6-bf58-99b5ee477e13 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Created linked-clone VM from snapshot [ 894.648158] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32bee319-6be5-431a-87ba-c11b1c31cae8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.658737] env[62627]: DEBUG nova.virt.vmwareapi.images [None req-8f99e995-067a-40e6-bf58-99b5ee477e13 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Uploading image 62effbcb-0336-416d-b766-b7800c022151 {{(pid=62627) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 894.692987] env[62627]: DEBUG oslo_vmware.rw_handles [None req-8f99e995-067a-40e6-bf58-99b5ee477e13 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 894.692987] env[62627]: value = "vm-447670" [ 894.692987] env[62627]: _type = "VirtualMachine" [ 894.692987] env[62627]: }. 
{{(pid=62627) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 894.693877] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-6a795f13-b751-45f2-969c-cd09bc99260c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.703586] env[62627]: DEBUG oslo_vmware.rw_handles [None req-8f99e995-067a-40e6-bf58-99b5ee477e13 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Lease: (returnval){ [ 894.703586] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52348bf1-7e69-e39f-e8d2-9dc1913c3234" [ 894.703586] env[62627]: _type = "HttpNfcLease" [ 894.703586] env[62627]: } obtained for exporting VM: (result){ [ 894.703586] env[62627]: value = "vm-447670" [ 894.703586] env[62627]: _type = "VirtualMachine" [ 894.703586] env[62627]: }. {{(pid=62627) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 894.703921] env[62627]: DEBUG oslo_vmware.api [None req-8f99e995-067a-40e6-bf58-99b5ee477e13 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Waiting for the lease: (returnval){ [ 894.703921] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52348bf1-7e69-e39f-e8d2-9dc1913c3234" [ 894.703921] env[62627]: _type = "HttpNfcLease" [ 894.703921] env[62627]: } to be ready. {{(pid=62627) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 894.712340] env[62627]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 894.712340] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52348bf1-7e69-e39f-e8d2-9dc1913c3234" [ 894.712340] env[62627]: _type = "HttpNfcLease" [ 894.712340] env[62627]: } is initializing. {{(pid=62627) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 894.837339] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.765s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 894.837903] env[62627]: DEBUG nova.compute.manager [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] [instance: c9088ac9-456b-4c92-8773-79c263e7cc2a] Start building networks asynchronously for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 894.841145] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4df07c0a-12f0-413c-90d3-aaa61c1e559a tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.470s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 894.841405] env[62627]: DEBUG nova.objects.instance [None req-4df07c0a-12f0-413c-90d3-aaa61c1e559a tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Lazy-loading 'resources' on Instance uuid db8ddb7b-b119-415c-98d9-dc5b3d2868b7 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 894.943392] env[62627]: DEBUG oslo_vmware.api [None req-10c167b5-6b06-4d74-93cc-cf039683ff42 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194423, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.958682] env[62627]: INFO nova.compute.manager [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 02216be1-cdb9-421a-8a23-4a919b376595] Took 53.04 seconds to build instance. [ 894.964972] env[62627]: DEBUG oslo_vmware.api [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2194424, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.992091] env[62627]: DEBUG oslo_vmware.api [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194425, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.08269} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.992406] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: f1b70379-e13d-4007-ac2d-3d2d43cd5602] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 894.993199] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8101268f-051a-4189-961b-6b1670e94bd8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.016320] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: f1b70379-e13d-4007-ac2d-3d2d43cd5602] Reconfiguring VM instance instance-0000002c to attach disk [datastore1] f1b70379-e13d-4007-ac2d-3d2d43cd5602/f1b70379-e13d-4007-ac2d-3d2d43cd5602.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 895.016320] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f56c8794-b0dd-4466-80a9-a164db864970 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.035949] env[62627]: DEBUG oslo_vmware.api [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Waiting for the task: (returnval){ [ 895.035949] env[62627]: value = "task-2194427" [ 895.035949] env[62627]: _type = "Task" [ 895.035949] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.043985] env[62627]: DEBUG oslo_vmware.api [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194427, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.104260] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Releasing lock "refresh_cache-574293e3-8ec8-410f-8a30-83bc60d25d64" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 895.104693] env[62627]: DEBUG nova.compute.manager [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Instance network_info: |[{"id": "6bf6cde4-6fe7-4f05-a8ed-b57e621b565b", "address": "fa:16:3e:65:58:4e", "network": {"id": "a2b42bd2-d4d5-4d9a-9f84-0a6c9158114f", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-988988055", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.140", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecf9c58481c14634b2315248118ed1b6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ecc4615-18f0-4324-8e16-5e5d513325e2", "external-id": "nsx-vlan-transportzone-167", "segmentation_id": 167, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6bf6cde4-6f", "ovs_interfaceid": "6bf6cde4-6fe7-4f05-a8ed-b57e621b565b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e9aeb5b9-eeec-406e-a7d0-6207531ec459", "address": "fa:16:3e:1c:d1:bb", "network": {"id": "3f097461-afdd-4fac-a30a-555f765c38e6", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1093830906", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.241", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "ecf9c58481c14634b2315248118ed1b6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb18870e-f482-4c7b-8cd4-5c933d3ad294", "external-id": "nsx-vlan-transportzone-76", "segmentation_id": 76, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape9aeb5b9-ee", "ovs_interfaceid": "e9aeb5b9-eeec-406e-a7d0-6207531ec459", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e8a543dc-e58e-4c74-a763-b795643a2d53", "address": "fa:16:3e:1c:4c:8b", "network": {"id": "a2b42bd2-d4d5-4d9a-9f84-0a6c9158114f", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-988988055", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, 
"meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecf9c58481c14634b2315248118ed1b6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ecc4615-18f0-4324-8e16-5e5d513325e2", "external-id": "nsx-vlan-transportzone-167", "segmentation_id": 167, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8a543dc-e5", "ovs_interfaceid": "e8a543dc-e58e-4c74-a763-b795643a2d53", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 895.105303] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:65:58:4e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ecc4615-18f0-4324-8e16-5e5d513325e2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6bf6cde4-6fe7-4f05-a8ed-b57e621b565b', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:1c:d1:bb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'eb18870e-f482-4c7b-8cd4-5c933d3ad294', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e9aeb5b9-eeec-406e-a7d0-6207531ec459', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:1c:4c:8b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ecc4615-18f0-4324-8e16-5e5d513325e2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e8a543dc-e58e-4c74-a763-b795643a2d53', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 895.117864] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Creating folder: Project (ecf9c58481c14634b2315248118ed1b6). Parent ref: group-v447541. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 895.119672] env[62627]: DEBUG nova.network.neutron [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 895.122603] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0248997b-896a-45cc-902f-4be3818fc745 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.135585] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Created folder: Project (ecf9c58481c14634b2315248118ed1b6) in parent group-v447541. [ 895.135781] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Creating folder: Instances. Parent ref: group-v447671. 
{{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 895.136062] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a44b4bd4-a7ec-4179-8f8d-d4321bc21737 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.147157] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Created folder: Instances in parent group-v447671. [ 895.147414] env[62627]: DEBUG oslo.service.loopingcall [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 895.149833] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 895.150089] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-584e0fcd-c80a-4958-a977-223b8e738eb7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.178850] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 895.178850] env[62627]: value = "task-2194430" [ 895.178850] env[62627]: _type = "Task" [ 895.178850] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.191582] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194430, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.215398] env[62627]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 895.215398] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52348bf1-7e69-e39f-e8d2-9dc1913c3234" [ 895.215398] env[62627]: _type = "HttpNfcLease" [ 895.215398] env[62627]: } is ready. {{(pid=62627) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 895.215713] env[62627]: DEBUG oslo_vmware.rw_handles [None req-8f99e995-067a-40e6-bf58-99b5ee477e13 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 895.215713] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52348bf1-7e69-e39f-e8d2-9dc1913c3234" [ 895.215713] env[62627]: _type = "HttpNfcLease" [ 895.215713] env[62627]: }. {{(pid=62627) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 895.218203] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7940b583-cb34-49f3-8d5a-0694a52e2c07 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.225069] env[62627]: DEBUG oslo_vmware.rw_handles [None req-8f99e995-067a-40e6-bf58-99b5ee477e13 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52954380-d016-feee-0a09-87a006acf023/disk-0.vmdk from lease info. 
{{(pid=62627) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 895.225069] env[62627]: DEBUG oslo_vmware.rw_handles [None req-8f99e995-067a-40e6-bf58-99b5ee477e13 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52954380-d016-feee-0a09-87a006acf023/disk-0.vmdk for reading. {{(pid=62627) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 895.323465] env[62627]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-045ccc95-1f65-4e18-889e-10a8ed6a3cf3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.345247] env[62627]: DEBUG nova.compute.utils [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 895.349351] env[62627]: DEBUG nova.compute.manager [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] [instance: c9088ac9-456b-4c92-8773-79c263e7cc2a] Allocating IP information in the background. {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 895.349635] env[62627]: DEBUG nova.network.neutron [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] [instance: c9088ac9-456b-4c92-8773-79c263e7cc2a] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 895.400518] env[62627]: DEBUG nova.compute.manager [req-ed9e230a-04d1-4238-9ee2-14d34355e330 req-da357540-e56c-4201-a5df-af5b77e44952 service nova] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Received event network-changed-e8a543dc-e58e-4c74-a763-b795643a2d53 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 895.400822] env[62627]: DEBUG nova.compute.manager [req-ed9e230a-04d1-4238-9ee2-14d34355e330 req-da357540-e56c-4201-a5df-af5b77e44952 service nova] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Refreshing instance network info cache due to event network-changed-e8a543dc-e58e-4c74-a763-b795643a2d53. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 895.401397] env[62627]: DEBUG oslo_concurrency.lockutils [req-ed9e230a-04d1-4238-9ee2-14d34355e330 req-da357540-e56c-4201-a5df-af5b77e44952 service nova] Acquiring lock "refresh_cache-574293e3-8ec8-410f-8a30-83bc60d25d64" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 895.401666] env[62627]: DEBUG oslo_concurrency.lockutils [req-ed9e230a-04d1-4238-9ee2-14d34355e330 req-da357540-e56c-4201-a5df-af5b77e44952 service nova] Acquired lock "refresh_cache-574293e3-8ec8-410f-8a30-83bc60d25d64" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 895.401941] env[62627]: DEBUG nova.network.neutron [req-ed9e230a-04d1-4238-9ee2-14d34355e330 req-da357540-e56c-4201-a5df-af5b77e44952 service nova] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Refreshing network info cache for port e8a543dc-e58e-4c74-a763-b795643a2d53 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 895.443885] env[62627]: DEBUG oslo_vmware.api [None req-10c167b5-6b06-4d74-93cc-cf039683ff42 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194423, 'name': RemoveSnapshot_Task, 'duration_secs': 0.739324} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.443885] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-10c167b5-6b06-4d74-93cc-cf039683ff42 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8] Deleted Snapshot of the VM instance {{(pid=62627) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 895.469789] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Lock "02216be1-cdb9-421a-8a23-4a919b376595" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 98.728s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 895.471134] env[62627]: DEBUG oslo_vmware.api [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2194424, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.529225} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.476980] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore1] 892fc84b-02a9-4fa0-81b5-80326a060c64/892fc84b-02a9-4fa0-81b5-80326a060c64.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 895.477322] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 895.479367] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-66a47002-8920-43f0-8fe2-58204e7466ab {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.487208] env[62627]: DEBUG oslo_vmware.api [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Waiting for the task: (returnval){ [ 895.487208] env[62627]: value = "task-2194431" [ 895.487208] env[62627]: _type = "Task" [ 895.487208] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.500882] env[62627]: DEBUG oslo_vmware.api [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2194431, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.519446] env[62627]: DEBUG nova.policy [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ff59ed29f4b04f48b9ac4d7e5dec453a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cc28d39b16924be5889b7c027da5f98b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 895.552555] env[62627]: DEBUG oslo_vmware.api [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194427, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.569221] env[62627]: DEBUG nova.network.neutron [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] Updating instance_info_cache with network_info: [{"id": "993b71f3-f13a-4db1-b2b7-2da39bd4d41e", "address": "fa:16:3e:32:d4:ef", "network": {"id": "cc0a3a8a-1222-4362-a164-11bcef0a8bc1", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1517256335-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "01b0f3fe63f64e1183acc0fec83bb3f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52f465cb-7418-4172-bd7d-aec00abeb692", "external-id": "nsx-vlan-transportzone-895", "segmentation_id": 895, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap993b71f3-f1", "ovs_interfaceid": "993b71f3-f13a-4db1-b2b7-2da39bd4d41e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 895.695529] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194430, 'name': CreateVM_Task} progress is 25%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.850247] env[62627]: DEBUG nova.compute.manager [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] [instance: c9088ac9-456b-4c92-8773-79c263e7cc2a] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 895.861184] env[62627]: DEBUG nova.network.neutron [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218] Successfully updated port: dca7e38e-83c7-4679-824f-f98d9fc325ad {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 895.956232] env[62627]: WARNING nova.compute.manager [None req-10c167b5-6b06-4d74-93cc-cf039683ff42 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8] Image not found during snapshot: nova.exception.ImageNotFound: Image 0bd31f80-af13-4889-9031-2891b7400c89 could not be found. [ 895.980436] env[62627]: DEBUG nova.compute.manager [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] [instance: 66882603-6ff6-49c2-9f42-3521ecaf98b2] Starting instance... 
{{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 896.002321] env[62627]: DEBUG oslo_vmware.api [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2194431, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069862} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.002968] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 896.003884] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90411371-8b98-4765-ae36-853ca5588854 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.035371] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Reconfiguring VM instance instance-0000002d to attach disk [datastore1] 892fc84b-02a9-4fa0-81b5-80326a060c64/892fc84b-02a9-4fa0-81b5-80326a060c64.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 896.037867] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-baa7ac8b-50c0-4b10-a3cd-d331d07af1b2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.057070] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dd35696-d09a-480b-87b2-431028903186 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.072535] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Releasing lock "refresh_cache-e76bdb57-4a30-4222-ad4c-d95e4e6193cb" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 896.073114] env[62627]: DEBUG nova.compute.manager [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] Instance network_info: |[{"id": "993b71f3-f13a-4db1-b2b7-2da39bd4d41e", "address": "fa:16:3e:32:d4:ef", "network": {"id": "cc0a3a8a-1222-4362-a164-11bcef0a8bc1", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1517256335-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "01b0f3fe63f64e1183acc0fec83bb3f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "52f465cb-7418-4172-bd7d-aec00abeb692", "external-id": "nsx-vlan-transportzone-895", "segmentation_id": 895, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap993b71f3-f1", "ovs_interfaceid": "993b71f3-f13a-4db1-b2b7-2da39bd4d41e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 896.080025] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:32:d4:ef', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '52f465cb-7418-4172-bd7d-aec00abeb692', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '993b71f3-f13a-4db1-b2b7-2da39bd4d41e', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 896.088886] env[62627]: DEBUG oslo.service.loopingcall [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 896.090080] env[62627]: DEBUG oslo_vmware.api [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194427, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.092028] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 896.093655] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-072297f9-caa6-4e8c-b44e-1f124dc41759 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.098437] env[62627]: DEBUG oslo_vmware.api [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Waiting for the task: (returnval){ [ 896.098437] env[62627]: value = "task-2194432" [ 896.098437] env[62627]: _type = "Task" [ 896.098437] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.098831] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f127e01b-58ea-4b28-973d-84b4582f6d98 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.163715] env[62627]: DEBUG oslo_concurrency.lockutils [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Acquiring lock "5d288606-02b6-4d11-b285-7325e48fb5b3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 896.164351] env[62627]: DEBUG oslo_concurrency.lockutils [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Lock "5d288606-02b6-4d11-b285-7325e48fb5b3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 896.165598] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1ef0577-0403-428f-8206-eb3a50854cbd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.172995] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 896.172995] env[62627]: value = "task-2194433" [ 896.172995] env[62627]: _type = "Task" [ 896.172995] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.175816] env[62627]: DEBUG oslo_vmware.api [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2194432, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.195325] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da166fad-6f34-49fa-8e27-f1e008e80887 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.205801] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194433, 'name': CreateVM_Task} progress is 15%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.211494] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194430, 'name': CreateVM_Task, 'duration_secs': 0.817796} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.220631] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 896.221420] env[62627]: DEBUG nova.compute.provider_tree [None req-4df07c0a-12f0-413c-90d3-aaa61c1e559a tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 896.223975] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 896.224322] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 896.224933] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 896.225846] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ca504a0-d385-49b9-a12a-f334ea7d2a6f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.231595] env[62627]: DEBUG oslo_vmware.api [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Waiting for the task: (returnval){ [ 896.231595] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52fcbff9-c6f2-f44c-06bb-df5993678bd7" [ 896.231595] env[62627]: _type = "Task" [ 896.231595] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.245228] env[62627]: DEBUG oslo_vmware.api [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52fcbff9-c6f2-f44c-06bb-df5993678bd7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.311255] env[62627]: DEBUG nova.compute.manager [req-4b100903-f9ba-4ad0-aa85-32e0c6090b1d req-1329c410-1909-4477-90b6-8e33fd2b2737 service nova] [instance: 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218] Received event network-vif-plugged-dca7e38e-83c7-4679-824f-f98d9fc325ad {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 896.311717] env[62627]: DEBUG oslo_concurrency.lockutils [req-4b100903-f9ba-4ad0-aa85-32e0c6090b1d req-1329c410-1909-4477-90b6-8e33fd2b2737 service nova] Acquiring lock "87bc4af9-7cbc-4797-8aaa-cf5f8ba51218-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 896.311991] env[62627]: DEBUG oslo_concurrency.lockutils [req-4b100903-f9ba-4ad0-aa85-32e0c6090b1d req-1329c410-1909-4477-90b6-8e33fd2b2737 service nova] Lock "87bc4af9-7cbc-4797-8aaa-cf5f8ba51218-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 896.312290] env[62627]: DEBUG oslo_concurrency.lockutils [req-4b100903-f9ba-4ad0-aa85-32e0c6090b1d req-1329c410-1909-4477-90b6-8e33fd2b2737 service nova] Lock "87bc4af9-7cbc-4797-8aaa-cf5f8ba51218-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 896.312585] env[62627]: DEBUG nova.compute.manager [req-4b100903-f9ba-4ad0-aa85-32e0c6090b1d req-1329c410-1909-4477-90b6-8e33fd2b2737 service nova] [instance: 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218] No waiting events found dispatching network-vif-plugged-dca7e38e-83c7-4679-824f-f98d9fc325ad {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 896.312744] env[62627]: WARNING nova.compute.manager [req-4b100903-f9ba-4ad0-aa85-32e0c6090b1d req-1329c410-1909-4477-90b6-8e33fd2b2737 service nova] [instance: 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218] Received unexpected event network-vif-plugged-dca7e38e-83c7-4679-824f-f98d9fc325ad for instance with vm_state building and task_state spawning. 
[ 896.365396] env[62627]: DEBUG oslo_concurrency.lockutils [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Acquiring lock "refresh_cache-87bc4af9-7cbc-4797-8aaa-cf5f8ba51218" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 896.365822] env[62627]: DEBUG oslo_concurrency.lockutils [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Acquired lock "refresh_cache-87bc4af9-7cbc-4797-8aaa-cf5f8ba51218" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 896.365981] env[62627]: DEBUG nova.network.neutron [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 896.513745] env[62627]: DEBUG oslo_concurrency.lockutils [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 896.575089] env[62627]: DEBUG oslo_vmware.api [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194427, 'name': ReconfigVM_Task, 'duration_secs': 1.425384} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.576048] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: f1b70379-e13d-4007-ac2d-3d2d43cd5602] Reconfigured VM instance instance-0000002c to attach disk [datastore1] f1b70379-e13d-4007-ac2d-3d2d43cd5602/f1b70379-e13d-4007-ac2d-3d2d43cd5602.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 896.576264] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-092ee774-8ad0-49b8-a314-3aebf3bca369 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.583662] env[62627]: DEBUG oslo_vmware.api [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Waiting for the task: (returnval){ [ 896.583662] env[62627]: value = "task-2194434" [ 896.583662] env[62627]: _type = "Task" [ 896.583662] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.595879] env[62627]: DEBUG oslo_vmware.api [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194434, 'name': Rename_Task} progress is 5%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.623709] env[62627]: DEBUG oslo_vmware.api [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2194432, 'name': ReconfigVM_Task, 'duration_secs': 0.481364} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.626019] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Reconfigured VM instance instance-0000002d to attach disk [datastore1] 892fc84b-02a9-4fa0-81b5-80326a060c64/892fc84b-02a9-4fa0-81b5-80326a060c64.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 896.626019] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3645312c-f91c-4578-9c39-cf3658f37727 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.634625] env[62627]: DEBUG oslo_vmware.api [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Waiting for the task: (returnval){ [ 896.634625] env[62627]: value = "task-2194435" [ 896.634625] env[62627]: _type = "Task" [ 896.634625] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.646278] env[62627]: DEBUG oslo_vmware.api [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2194435, 'name': Rename_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.690488] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194433, 'name': CreateVM_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.711792] env[62627]: DEBUG nova.network.neutron [req-ed9e230a-04d1-4238-9ee2-14d34355e330 req-da357540-e56c-4201-a5df-af5b77e44952 service nova] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Updated VIF entry in instance network info cache for port e8a543dc-e58e-4c74-a763-b795643a2d53. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 896.712939] env[62627]: DEBUG nova.network.neutron [req-ed9e230a-04d1-4238-9ee2-14d34355e330 req-da357540-e56c-4201-a5df-af5b77e44952 service nova] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Updating instance_info_cache with network_info: [{"id": "6bf6cde4-6fe7-4f05-a8ed-b57e621b565b", "address": "fa:16:3e:65:58:4e", "network": {"id": "a2b42bd2-d4d5-4d9a-9f84-0a6c9158114f", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-988988055", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.140", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecf9c58481c14634b2315248118ed1b6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ecc4615-18f0-4324-8e16-5e5d513325e2", "external-id": "nsx-vlan-transportzone-167", "segmentation_id": 167, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6bf6cde4-6f", "ovs_interfaceid": "6bf6cde4-6fe7-4f05-a8ed-b57e621b565b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e9aeb5b9-eeec-406e-a7d0-6207531ec459", "address": "fa:16:3e:1c:d1:bb", "network": {"id": "3f097461-afdd-4fac-a30a-555f765c38e6", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1093830906", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.241", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "ecf9c58481c14634b2315248118ed1b6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb18870e-f482-4c7b-8cd4-5c933d3ad294", "external-id": "nsx-vlan-transportzone-76", "segmentation_id": 76, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape9aeb5b9-ee", "ovs_interfaceid": "e9aeb5b9-eeec-406e-a7d0-6207531ec459", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e8a543dc-e58e-4c74-a763-b795643a2d53", "address": "fa:16:3e:1c:4c:8b", "network": {"id": "a2b42bd2-d4d5-4d9a-9f84-0a6c9158114f", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-988988055", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecf9c58481c14634b2315248118ed1b6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ecc4615-18f0-4324-8e16-5e5d513325e2", "external-id": "nsx-vlan-transportzone-167", 
"segmentation_id": 167, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8a543dc-e5", "ovs_interfaceid": "e8a543dc-e58e-4c74-a763-b795643a2d53", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 896.728712] env[62627]: DEBUG nova.scheduler.client.report [None req-4df07c0a-12f0-413c-90d3-aaa61c1e559a tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 896.737204] env[62627]: DEBUG nova.network.neutron [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] [instance: c9088ac9-456b-4c92-8773-79c263e7cc2a] Successfully created port: 7c970a80-e9f6-4f52-944e-27b8cc62ad4b {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 896.756973] env[62627]: DEBUG oslo_vmware.api [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52fcbff9-c6f2-f44c-06bb-df5993678bd7, 'name': SearchDatastore_Task, 'duration_secs': 0.020437} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.757664] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 896.758264] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 896.758717] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 896.759087] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 896.759399] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 896.762285] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-edd25325-cbd8-491e-88f8-d916254ccd25 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.771028] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 896.771028] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 896.771957] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cea90c18-0d6a-4f7f-a79e-59ba1862016c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.778169] env[62627]: DEBUG oslo_vmware.api [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Waiting for the task: (returnval){ [ 896.778169] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52c92370-2664-5299-09b5-5612a3c50fe4" [ 896.778169] env[62627]: _type = "Task" [ 896.778169] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.787267] env[62627]: DEBUG oslo_vmware.api [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52c92370-2664-5299-09b5-5612a3c50fe4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.865520] env[62627]: DEBUG nova.compute.manager [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] [instance: c9088ac9-456b-4c92-8773-79c263e7cc2a] Start spawning the instance on the hypervisor. {{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 896.900034] env[62627]: DEBUG nova.virt.hardware [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 896.900543] env[62627]: DEBUG nova.virt.hardware [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 896.900647] env[62627]: DEBUG nova.virt.hardware [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 896.900978] env[62627]: DEBUG nova.virt.hardware [None 
req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 896.901528] env[62627]: DEBUG nova.virt.hardware [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 896.901825] env[62627]: DEBUG nova.virt.hardware [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 896.902753] env[62627]: DEBUG nova.virt.hardware [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 896.903121] env[62627]: DEBUG nova.virt.hardware [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 896.903396] env[62627]: DEBUG nova.virt.hardware [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 896.903796] env[62627]: DEBUG nova.virt.hardware [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 896.904114] env[62627]: DEBUG nova.virt.hardware [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 896.905050] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50e27250-4b9d-4179-89e1-b1fcd8af5a42 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.914621] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9fb5597-bbe3-4db8-9197-d9262f135366 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.094904] env[62627]: DEBUG oslo_vmware.api [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] 
Task: {'id': task-2194434, 'name': Rename_Task, 'duration_secs': 0.156111} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.095592] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: f1b70379-e13d-4007-ac2d-3d2d43cd5602] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 897.095592] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4aaa214f-cf80-4b0f-9ed7-a10fa771cb2d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.108234] env[62627]: DEBUG oslo_vmware.api [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Waiting for the task: (returnval){ [ 897.108234] env[62627]: value = "task-2194436" [ 897.108234] env[62627]: _type = "Task" [ 897.108234] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.122824] env[62627]: DEBUG oslo_vmware.api [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194436, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.147387] env[62627]: DEBUG oslo_vmware.api [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2194435, 'name': Rename_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.152523] env[62627]: DEBUG nova.network.neutron [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 897.188557] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194433, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.216978] env[62627]: DEBUG oslo_concurrency.lockutils [req-ed9e230a-04d1-4238-9ee2-14d34355e330 req-da357540-e56c-4201-a5df-af5b77e44952 service nova] Releasing lock "refresh_cache-574293e3-8ec8-410f-8a30-83bc60d25d64" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 897.218126] env[62627]: DEBUG nova.compute.manager [req-ed9e230a-04d1-4238-9ee2-14d34355e330 req-da357540-e56c-4201-a5df-af5b77e44952 service nova] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] Received event network-vif-plugged-993b71f3-f13a-4db1-b2b7-2da39bd4d41e {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 897.218126] env[62627]: DEBUG oslo_concurrency.lockutils [req-ed9e230a-04d1-4238-9ee2-14d34355e330 req-da357540-e56c-4201-a5df-af5b77e44952 service nova] Acquiring lock "e76bdb57-4a30-4222-ad4c-d95e4e6193cb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 897.218126] env[62627]: DEBUG oslo_concurrency.lockutils [req-ed9e230a-04d1-4238-9ee2-14d34355e330 req-da357540-e56c-4201-a5df-af5b77e44952 service nova] Lock "e76bdb57-4a30-4222-ad4c-d95e4e6193cb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 897.218126] env[62627]: DEBUG oslo_concurrency.lockutils [req-ed9e230a-04d1-4238-9ee2-14d34355e330 req-da357540-e56c-4201-a5df-af5b77e44952 service nova] Lock "e76bdb57-4a30-4222-ad4c-d95e4e6193cb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 897.218126] env[62627]: DEBUG nova.compute.manager [req-ed9e230a-04d1-4238-9ee2-14d34355e330 req-da357540-e56c-4201-a5df-af5b77e44952 service nova] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] No waiting events found dispatching network-vif-plugged-993b71f3-f13a-4db1-b2b7-2da39bd4d41e {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 897.218126] env[62627]: WARNING nova.compute.manager [req-ed9e230a-04d1-4238-9ee2-14d34355e330 req-da357540-e56c-4201-a5df-af5b77e44952 service nova] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] Received unexpected event network-vif-plugged-993b71f3-f13a-4db1-b2b7-2da39bd4d41e for instance with vm_state building and task_state spawning. [ 897.218401] env[62627]: DEBUG nova.compute.manager [req-ed9e230a-04d1-4238-9ee2-14d34355e330 req-da357540-e56c-4201-a5df-af5b77e44952 service nova] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] Received event network-changed-993b71f3-f13a-4db1-b2b7-2da39bd4d41e {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 897.218487] env[62627]: DEBUG nova.compute.manager [req-ed9e230a-04d1-4238-9ee2-14d34355e330 req-da357540-e56c-4201-a5df-af5b77e44952 service nova] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] Refreshing instance network info cache due to event network-changed-993b71f3-f13a-4db1-b2b7-2da39bd4d41e. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 897.218908] env[62627]: DEBUG oslo_concurrency.lockutils [req-ed9e230a-04d1-4238-9ee2-14d34355e330 req-da357540-e56c-4201-a5df-af5b77e44952 service nova] Acquiring lock "refresh_cache-e76bdb57-4a30-4222-ad4c-d95e4e6193cb" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 897.218908] env[62627]: DEBUG oslo_concurrency.lockutils [req-ed9e230a-04d1-4238-9ee2-14d34355e330 req-da357540-e56c-4201-a5df-af5b77e44952 service nova] Acquired lock "refresh_cache-e76bdb57-4a30-4222-ad4c-d95e4e6193cb" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 897.219249] env[62627]: DEBUG nova.network.neutron [req-ed9e230a-04d1-4238-9ee2-14d34355e330 req-da357540-e56c-4201-a5df-af5b77e44952 service nova] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] Refreshing network info cache for port 993b71f3-f13a-4db1-b2b7-2da39bd4d41e {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 897.246226] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4df07c0a-12f0-413c-90d3-aaa61c1e559a tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.403s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 897.247142] env[62627]: DEBUG oslo_concurrency.lockutils [None req-67ae8d3c-a51a-44e1-87e6-08c82ce6c4f0 tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.866s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 897.247549] env[62627]: DEBUG nova.objects.instance [None req-67ae8d3c-a51a-44e1-87e6-08c82ce6c4f0 tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Lazy-loading 'resources' on Instance uuid 5f0240da-bec1-4bf3-a975-6ed3d0630a2d {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 897.275889] env[62627]: INFO nova.scheduler.client.report [None req-4df07c0a-12f0-413c-90d3-aaa61c1e559a tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Deleted allocations for instance db8ddb7b-b119-415c-98d9-dc5b3d2868b7 [ 897.289803] env[62627]: DEBUG oslo_vmware.api [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52c92370-2664-5299-09b5-5612a3c50fe4, 'name': SearchDatastore_Task, 'duration_secs': 0.013421} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.290624] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-49199f34-c0b7-46c7-af67-aaa28d827284 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.296661] env[62627]: DEBUG oslo_vmware.api [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Waiting for the task: (returnval){ [ 897.296661] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52a1463c-226c-017b-63fd-e3bc1553cf69" [ 897.296661] env[62627]: _type = "Task" [ 897.296661] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.304900] env[62627]: DEBUG oslo_vmware.api [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52a1463c-226c-017b-63fd-e3bc1553cf69, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.453934] env[62627]: DEBUG nova.network.neutron [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218] Updating instance_info_cache with network_info: [{"id": "dca7e38e-83c7-4679-824f-f98d9fc325ad", "address": "fa:16:3e:7c:d9:61", "network": {"id": "4e8b6953-adb2-4977-8315-179e01e807e0", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1952492535-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14238d8125674b63a7cf4452cb260283", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2907cce-d529-4809-af05-d29397bed211", "external-id": "nsx-vlan-transportzone-427", "segmentation_id": 427, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdca7e38e-83", "ovs_interfaceid": "dca7e38e-83c7-4679-824f-f98d9fc325ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 897.620579] env[62627]: DEBUG oslo_vmware.api [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194436, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.645349] env[62627]: DEBUG oslo_vmware.api [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2194435, 'name': Rename_Task} progress is 99%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.688336] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194433, 'name': CreateVM_Task, 'duration_secs': 1.403576} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.688451] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 897.689114] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 897.689282] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 897.689613] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 897.689877] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f8b2f61f-4bbf-46b3-8184-27a477415e52 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.699615] env[62627]: DEBUG oslo_vmware.api [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the task: (returnval){ [ 897.699615] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52cce55d-cfa0-36d1-7179-a2e8ecdb0838" [ 897.699615] env[62627]: _type = "Task" [ 897.699615] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.711636] env[62627]: DEBUG oslo_vmware.api [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52cce55d-cfa0-36d1-7179-a2e8ecdb0838, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.787112] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4df07c0a-12f0-413c-90d3-aaa61c1e559a tempest-VolumesAssistedSnapshotsTest-1178161758 tempest-VolumesAssistedSnapshotsTest-1178161758-project-member] Lock "db8ddb7b-b119-415c-98d9-dc5b3d2868b7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.028s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 897.808753] env[62627]: DEBUG oslo_vmware.api [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52a1463c-226c-017b-63fd-e3bc1553cf69, 'name': SearchDatastore_Task, 'duration_secs': 0.012581} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.810512] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 897.810512] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 574293e3-8ec8-410f-8a30-83bc60d25d64/574293e3-8ec8-410f-8a30-83bc60d25d64.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 897.810512] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-147fb2f2-e29b-4364-bc27-9162639d3246 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.821534] env[62627]: DEBUG oslo_vmware.api [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Waiting for the task: (returnval){ [ 897.821534] env[62627]: value = "task-2194437" [ 897.821534] env[62627]: _type = "Task" [ 897.821534] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.830526] env[62627]: DEBUG oslo_concurrency.lockutils [None req-fbd4e3fb-0e1c-412d-9116-6459271003c6 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Acquiring lock "4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 897.831274] env[62627]: DEBUG oslo_concurrency.lockutils [None req-fbd4e3fb-0e1c-412d-9116-6459271003c6 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Lock "4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 897.831274] env[62627]: DEBUG oslo_concurrency.lockutils [None req-fbd4e3fb-0e1c-412d-9116-6459271003c6 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Acquiring lock "4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 897.831274] env[62627]: DEBUG oslo_concurrency.lockutils [None req-fbd4e3fb-0e1c-412d-9116-6459271003c6 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Lock "4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 897.831497] env[62627]: DEBUG oslo_concurrency.lockutils [None req-fbd4e3fb-0e1c-412d-9116-6459271003c6 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Lock "4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 897.834485] env[62627]: INFO nova.compute.manager [None req-fbd4e3fb-0e1c-412d-9116-6459271003c6 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8] Terminating instance [ 897.845147] env[62627]: DEBUG oslo_vmware.api [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Task: {'id': task-2194437, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.961071] env[62627]: DEBUG oslo_concurrency.lockutils [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Releasing lock "refresh_cache-87bc4af9-7cbc-4797-8aaa-cf5f8ba51218" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 897.961780] env[62627]: DEBUG nova.compute.manager [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218] Instance network_info: |[{"id": "dca7e38e-83c7-4679-824f-f98d9fc325ad", "address": "fa:16:3e:7c:d9:61", "network": {"id": "4e8b6953-adb2-4977-8315-179e01e807e0", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1952492535-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14238d8125674b63a7cf4452cb260283", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2907cce-d529-4809-af05-d29397bed211", "external-id": "nsx-vlan-transportzone-427", "segmentation_id": 427, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdca7e38e-83", "ovs_interfaceid": "dca7e38e-83c7-4679-824f-f98d9fc325ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 897.962432] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7c:d9:61', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b2907cce-d529-4809-af05-d29397bed211', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dca7e38e-83c7-4679-824f-f98d9fc325ad', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 897.974254] env[62627]: DEBUG oslo.service.loopingcall [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 897.974254] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 897.974254] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-08dc2403-3dd1-4bab-a024-1db5ea85af38 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.998987] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 897.998987] env[62627]: value = "task-2194438" [ 897.998987] env[62627]: _type = "Task" [ 897.998987] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.010470] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194438, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.094667] env[62627]: DEBUG nova.network.neutron [req-ed9e230a-04d1-4238-9ee2-14d34355e330 req-da357540-e56c-4201-a5df-af5b77e44952 service nova] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] Updated VIF entry in instance network info cache for port 993b71f3-f13a-4db1-b2b7-2da39bd4d41e. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 898.095217] env[62627]: DEBUG nova.network.neutron [req-ed9e230a-04d1-4238-9ee2-14d34355e330 req-da357540-e56c-4201-a5df-af5b77e44952 service nova] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] Updating instance_info_cache with network_info: [{"id": "993b71f3-f13a-4db1-b2b7-2da39bd4d41e", "address": "fa:16:3e:32:d4:ef", "network": {"id": "cc0a3a8a-1222-4362-a164-11bcef0a8bc1", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1517256335-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "01b0f3fe63f64e1183acc0fec83bb3f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52f465cb-7418-4172-bd7d-aec00abeb692", "external-id": "nsx-vlan-transportzone-895", "segmentation_id": 895, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap993b71f3-f1", "ovs_interfaceid": "993b71f3-f13a-4db1-b2b7-2da39bd4d41e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 898.123967] env[62627]: DEBUG oslo_vmware.api [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194436, 'name': PowerOnVM_Task, 'duration_secs': 0.691524} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.125290] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: f1b70379-e13d-4007-ac2d-3d2d43cd5602] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 898.125290] env[62627]: INFO nova.compute.manager [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: f1b70379-e13d-4007-ac2d-3d2d43cd5602] Took 12.18 seconds to spawn the instance on the hypervisor. [ 898.125290] env[62627]: DEBUG nova.compute.manager [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: f1b70379-e13d-4007-ac2d-3d2d43cd5602] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 898.126201] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5fb5278-7378-4bd8-8230-85a0770be430 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.151599] env[62627]: DEBUG oslo_vmware.api [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2194435, 'name': Rename_Task, 'duration_secs': 1.040369} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.154531] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 898.156160] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9e1a457e-0cf7-4f87-a8d6-c98d6a7317cb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.165244] env[62627]: DEBUG oslo_vmware.api [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Waiting for the task: (returnval){ [ 898.165244] env[62627]: value = "task-2194439" [ 898.165244] env[62627]: _type = "Task" [ 898.165244] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.177243] env[62627]: DEBUG oslo_vmware.api [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2194439, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.217122] env[62627]: DEBUG oslo_vmware.api [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52cce55d-cfa0-36d1-7179-a2e8ecdb0838, 'name': SearchDatastore_Task, 'duration_secs': 0.011446} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.220172] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 898.220573] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 898.220948] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 898.221085] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 898.221387] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 898.222743] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5e728017-2d0f-48f6-9299-5d9e4d854229 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.232361] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 898.232927] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 898.237758] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-195f6e1f-c296-4b0a-8960-e2da94ad17e8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.245151] env[62627]: DEBUG oslo_vmware.api [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the task: (returnval){ [ 898.245151] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52477382-d0ed-f1dd-1311-849948bac245" [ 898.245151] env[62627]: _type = "Task" [ 898.245151] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.258107] env[62627]: DEBUG oslo_vmware.api [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52477382-d0ed-f1dd-1311-849948bac245, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.334601] env[62627]: DEBUG oslo_vmware.api [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Task: {'id': task-2194437, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.340927] env[62627]: DEBUG nova.compute.manager [None req-fbd4e3fb-0e1c-412d-9116-6459271003c6 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8] Start destroying the instance on the hypervisor. 
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 898.341234] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-fbd4e3fb-0e1c-412d-9116-6459271003c6 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 898.342511] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdb777ec-a522-4491-94ee-71cf320a6a2b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.350708] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbd4e3fb-0e1c-412d-9116-6459271003c6 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 898.350708] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c39f7381-f932-4b6c-84d1-cfaa1f2e7aaf {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.361261] env[62627]: DEBUG oslo_vmware.api [None req-fbd4e3fb-0e1c-412d-9116-6459271003c6 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Waiting for the task: (returnval){ [ 898.361261] env[62627]: value = "task-2194440" [ 898.361261] env[62627]: _type = "Task" [ 898.361261] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.371741] env[62627]: DEBUG oslo_vmware.api [None req-fbd4e3fb-0e1c-412d-9116-6459271003c6 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194440, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.439619] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7115ada8-9558-4da5-9b15-e01802a51033 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.449653] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4e945bc-d465-4ca6-8bfa-941264e94135 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.487656] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84b62c17-5994-4fb1-93b7-8984b0f345f7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.494531] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adb0117a-7ffe-4cad-ba77-f29f18dd5c50 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.513234] env[62627]: DEBUG nova.compute.provider_tree [None req-67ae8d3c-a51a-44e1-87e6-08c82ce6c4f0 tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 898.523317] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194438, 'name': CreateVM_Task, 'duration_secs': 0.427799} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.523317] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 898.523516] env[62627]: DEBUG oslo_concurrency.lockutils [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 898.523674] env[62627]: DEBUG oslo_concurrency.lockutils [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 898.523987] env[62627]: DEBUG oslo_concurrency.lockutils [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 898.524313] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bfa1d501-be51-48ed-aeed-c6ef19f47280 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.532024] env[62627]: 
DEBUG oslo_vmware.api [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for the task: (returnval){ [ 898.532024] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]520d0332-b497-41de-8280-6c5ba17f3021" [ 898.532024] env[62627]: _type = "Task" [ 898.532024] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.537363] env[62627]: DEBUG nova.compute.manager [req-7a89a8a6-e1f6-4b2d-8701-04bbaa30b2d5 req-57dce54d-81f2-4284-a90a-e22ff4c8eed1 service nova] [instance: 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218] Received event network-changed-dca7e38e-83c7-4679-824f-f98d9fc325ad {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 898.537547] env[62627]: DEBUG nova.compute.manager [req-7a89a8a6-e1f6-4b2d-8701-04bbaa30b2d5 req-57dce54d-81f2-4284-a90a-e22ff4c8eed1 service nova] [instance: 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218] Refreshing instance network info cache due to event network-changed-dca7e38e-83c7-4679-824f-f98d9fc325ad. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 898.537753] env[62627]: DEBUG oslo_concurrency.lockutils [req-7a89a8a6-e1f6-4b2d-8701-04bbaa30b2d5 req-57dce54d-81f2-4284-a90a-e22ff4c8eed1 service nova] Acquiring lock "refresh_cache-87bc4af9-7cbc-4797-8aaa-cf5f8ba51218" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 898.537896] env[62627]: DEBUG oslo_concurrency.lockutils [req-7a89a8a6-e1f6-4b2d-8701-04bbaa30b2d5 req-57dce54d-81f2-4284-a90a-e22ff4c8eed1 service nova] Acquired lock "refresh_cache-87bc4af9-7cbc-4797-8aaa-cf5f8ba51218" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 898.538063] env[62627]: DEBUG nova.network.neutron [req-7a89a8a6-e1f6-4b2d-8701-04bbaa30b2d5 req-57dce54d-81f2-4284-a90a-e22ff4c8eed1 service nova] [instance: 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218] Refreshing network info cache for port dca7e38e-83c7-4679-824f-f98d9fc325ad {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 898.544673] env[62627]: DEBUG oslo_vmware.api [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]520d0332-b497-41de-8280-6c5ba17f3021, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.598161] env[62627]: DEBUG oslo_concurrency.lockutils [req-ed9e230a-04d1-4238-9ee2-14d34355e330 req-da357540-e56c-4201-a5df-af5b77e44952 service nova] Releasing lock "refresh_cache-e76bdb57-4a30-4222-ad4c-d95e4e6193cb" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 898.654451] env[62627]: INFO nova.compute.manager [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: f1b70379-e13d-4007-ac2d-3d2d43cd5602] Took 51.63 seconds to build instance. 
[ 898.682654] env[62627]: DEBUG oslo_vmware.api [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2194439, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.758120] env[62627]: DEBUG oslo_vmware.api [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52477382-d0ed-f1dd-1311-849948bac245, 'name': SearchDatastore_Task, 'duration_secs': 0.02428} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.759013] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-423f8e1e-6632-495b-9e00-456fc50ef97e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.765013] env[62627]: DEBUG oslo_vmware.api [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the task: (returnval){ [ 898.765013] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52e401cf-13b7-63fa-0225-57e075d97d55" [ 898.765013] env[62627]: _type = "Task" [ 898.765013] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.776480] env[62627]: DEBUG oslo_vmware.api [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52e401cf-13b7-63fa-0225-57e075d97d55, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.836736] env[62627]: DEBUG oslo_vmware.api [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Task: {'id': task-2194437, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.621771} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.836858] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 574293e3-8ec8-410f-8a30-83bc60d25d64/574293e3-8ec8-410f-8a30-83bc60d25d64.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 898.836974] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 898.838019] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1f6ccaa0-b6f9-4984-a425-689de2fd6f41 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.844891] env[62627]: DEBUG oslo_vmware.api [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Waiting for the task: (returnval){ [ 898.844891] env[62627]: value = "task-2194441" [ 898.844891] env[62627]: _type = "Task" [ 898.844891] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.860078] env[62627]: DEBUG oslo_vmware.api [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Task: {'id': task-2194441, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.871829] env[62627]: DEBUG oslo_vmware.api [None req-fbd4e3fb-0e1c-412d-9116-6459271003c6 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194440, 'name': PowerOffVM_Task, 'duration_secs': 0.25433} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.873451] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbd4e3fb-0e1c-412d-9116-6459271003c6 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 898.873451] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-fbd4e3fb-0e1c-412d-9116-6459271003c6 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 898.873451] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8754d3fc-de3b-4275-8de8-746001e8f870 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.018483] env[62627]: DEBUG nova.scheduler.client.report [None req-67ae8d3c-a51a-44e1-87e6-08c82ce6c4f0 tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 899.043386] env[62627]: DEBUG oslo_vmware.api [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]520d0332-b497-41de-8280-6c5ba17f3021, 'name': SearchDatastore_Task, 'duration_secs': 0.022069} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.045550] env[62627]: DEBUG oslo_concurrency.lockutils [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 899.045550] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 899.045550] env[62627]: DEBUG oslo_concurrency.lockutils [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 899.158322] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1eb60e4c-b8cc-4684-ab98-b93d27b9eb65 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Lock "f1b70379-e13d-4007-ac2d-3d2d43cd5602" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 102.388s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 899.181338] env[62627]: DEBUG oslo_vmware.api [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2194439, 'name': PowerOnVM_Task, 'duration_secs': 0.679182} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.181680] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 899.181937] env[62627]: INFO nova.compute.manager [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Took 10.49 seconds to spawn the instance on the hypervisor. 
[ 899.182255] env[62627]: DEBUG nova.compute.manager [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 899.185755] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aca37f0e-3077-4587-b3dd-b112f821bf69 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.196987] env[62627]: DEBUG nova.network.neutron [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] [instance: c9088ac9-456b-4c92-8773-79c263e7cc2a] Successfully updated port: 7c970a80-e9f6-4f52-944e-27b8cc62ad4b {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 899.279548] env[62627]: DEBUG oslo_vmware.api [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52e401cf-13b7-63fa-0225-57e075d97d55, 'name': SearchDatastore_Task, 'duration_secs': 0.017266} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.283705] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 899.283933] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore1] e76bdb57-4a30-4222-ad4c-d95e4e6193cb/e76bdb57-4a30-4222-ad4c-d95e4e6193cb.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 899.284373] env[62627]: DEBUG oslo_concurrency.lockutils [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 899.284558] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 899.285169] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ca8b38c3-97f8-4018-b4b0-b6269201d8ec {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.288033] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-0026c03d-dd6a-474b-8d05-e291996bab7b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.296897] env[62627]: DEBUG oslo_vmware.api [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the task: (returnval){ [ 899.296897] env[62627]: value = "task-2194443" [ 899.296897] env[62627]: _type = "Task" [ 899.296897] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.305211] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 899.305211] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 899.310743] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e66a25ac-3025-4749-a0b2-af2afb44e5c1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.320868] env[62627]: DEBUG oslo_vmware.api [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194443, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.320868] env[62627]: DEBUG nova.compute.manager [req-7a05237c-08c5-4dea-8f52-f2edef569d82 req-f5aecedd-0fdc-4abf-96e6-58c24609e714 service nova] [instance: c9088ac9-456b-4c92-8773-79c263e7cc2a] Received event network-vif-plugged-7c970a80-e9f6-4f52-944e-27b8cc62ad4b {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 899.320868] env[62627]: DEBUG oslo_concurrency.lockutils [req-7a05237c-08c5-4dea-8f52-f2edef569d82 req-f5aecedd-0fdc-4abf-96e6-58c24609e714 service nova] Acquiring lock "c9088ac9-456b-4c92-8773-79c263e7cc2a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 899.320868] env[62627]: DEBUG oslo_concurrency.lockutils [req-7a05237c-08c5-4dea-8f52-f2edef569d82 req-f5aecedd-0fdc-4abf-96e6-58c24609e714 service nova] Lock "c9088ac9-456b-4c92-8773-79c263e7cc2a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 899.320868] env[62627]: DEBUG oslo_concurrency.lockutils [req-7a05237c-08c5-4dea-8f52-f2edef569d82 req-f5aecedd-0fdc-4abf-96e6-58c24609e714 service nova] Lock "c9088ac9-456b-4c92-8773-79c263e7cc2a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 899.320868] env[62627]: DEBUG nova.compute.manager [req-7a05237c-08c5-4dea-8f52-f2edef569d82 req-f5aecedd-0fdc-4abf-96e6-58c24609e714 service nova] [instance: c9088ac9-456b-4c92-8773-79c263e7cc2a] No waiting events found dispatching network-vif-plugged-7c970a80-e9f6-4f52-944e-27b8cc62ad4b {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 899.320868] env[62627]: WARNING nova.compute.manager [req-7a05237c-08c5-4dea-8f52-f2edef569d82 req-f5aecedd-0fdc-4abf-96e6-58c24609e714 service nova] [instance: c9088ac9-456b-4c92-8773-79c263e7cc2a] Received unexpected event network-vif-plugged-7c970a80-e9f6-4f52-944e-27b8cc62ad4b for instance with vm_state building and task_state spawning. [ 899.322582] env[62627]: DEBUG oslo_vmware.api [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for the task: (returnval){ [ 899.322582] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d9a58f-5fed-fa00-4081-c82cfbe45024" [ 899.322582] env[62627]: _type = "Task" [ 899.322582] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.331753] env[62627]: DEBUG oslo_vmware.api [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d9a58f-5fed-fa00-4081-c82cfbe45024, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.342046] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-fbd4e3fb-0e1c-412d-9116-6459271003c6 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 899.342046] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-fbd4e3fb-0e1c-412d-9116-6459271003c6 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8] Deleting contents of the VM from datastore datastore1 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 899.342288] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbd4e3fb-0e1c-412d-9116-6459271003c6 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Deleting the datastore file [datastore1] 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 899.342408] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cb0f7779-0591-47c9-a04e-a7e015b56848 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.351413] env[62627]: DEBUG oslo_vmware.api [None req-fbd4e3fb-0e1c-412d-9116-6459271003c6 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Waiting for the task: (returnval){ [ 899.351413] env[62627]: value = "task-2194444" [ 899.351413] env[62627]: _type = "Task" [ 899.351413] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.358355] env[62627]: DEBUG oslo_vmware.api [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Task: {'id': task-2194441, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076634} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.359024] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 899.360161] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a417e7a4-384b-4850-ac1d-50fcf837c558 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.370032] env[62627]: DEBUG oslo_vmware.api [None req-fbd4e3fb-0e1c-412d-9116-6459271003c6 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194444, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.401847] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Reconfiguring VM instance instance-0000002a to attach disk [datastore2] 574293e3-8ec8-410f-8a30-83bc60d25d64/574293e3-8ec8-410f-8a30-83bc60d25d64.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 899.402249] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-83bf1b10-cdd5-43f4-b025-45c454d9585f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.422143] env[62627]: DEBUG oslo_vmware.api [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Waiting for the task: (returnval){ [ 899.422143] env[62627]: value = "task-2194445" [ 899.422143] env[62627]: _type = "Task" [ 899.422143] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.431419] env[62627]: DEBUG oslo_vmware.api [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Task: {'id': task-2194445, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.454830] env[62627]: DEBUG nova.network.neutron [req-7a89a8a6-e1f6-4b2d-8701-04bbaa30b2d5 req-57dce54d-81f2-4284-a90a-e22ff4c8eed1 service nova] [instance: 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218] Updated VIF entry in instance network info cache for port dca7e38e-83c7-4679-824f-f98d9fc325ad. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 899.454830] env[62627]: DEBUG nova.network.neutron [req-7a89a8a6-e1f6-4b2d-8701-04bbaa30b2d5 req-57dce54d-81f2-4284-a90a-e22ff4c8eed1 service nova] [instance: 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218] Updating instance_info_cache with network_info: [{"id": "dca7e38e-83c7-4679-824f-f98d9fc325ad", "address": "fa:16:3e:7c:d9:61", "network": {"id": "4e8b6953-adb2-4977-8315-179e01e807e0", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1952492535-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14238d8125674b63a7cf4452cb260283", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2907cce-d529-4809-af05-d29397bed211", "external-id": "nsx-vlan-transportzone-427", "segmentation_id": 427, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdca7e38e-83", "ovs_interfaceid": "dca7e38e-83c7-4679-824f-f98d9fc325ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 899.528162] env[62627]: DEBUG oslo_concurrency.lockutils [None req-67ae8d3c-a51a-44e1-87e6-08c82ce6c4f0 tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.281s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 899.534021] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.108s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 899.534832] env[62627]: INFO nova.compute.claims [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] [instance: 7804914c-7df8-45c2-a934-67aa73afb2e2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 899.561531] env[62627]: INFO nova.scheduler.client.report [None req-67ae8d3c-a51a-44e1-87e6-08c82ce6c4f0 tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Deleted allocations for instance 5f0240da-bec1-4bf3-a975-6ed3d0630a2d [ 899.663127] env[62627]: DEBUG nova.compute.manager [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Starting instance... 
{{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 899.704248] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Acquiring lock "refresh_cache-c9088ac9-456b-4c92-8773-79c263e7cc2a" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 899.704420] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Acquired lock "refresh_cache-c9088ac9-456b-4c92-8773-79c263e7cc2a" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 899.704598] env[62627]: DEBUG nova.network.neutron [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] [instance: c9088ac9-456b-4c92-8773-79c263e7cc2a] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 899.709264] env[62627]: INFO nova.compute.manager [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Took 50.57 seconds to build instance. [ 899.808839] env[62627]: DEBUG oslo_vmware.api [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194443, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.835410] env[62627]: DEBUG oslo_vmware.api [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d9a58f-5fed-fa00-4081-c82cfbe45024, 'name': SearchDatastore_Task, 'duration_secs': 0.014168} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.836439] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1783ec71-78fe-466e-9400-f798e2bb7ce3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.843803] env[62627]: DEBUG oslo_vmware.api [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for the task: (returnval){ [ 899.843803] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d40a63-871a-5c28-be61-a76d2c5576c8" [ 899.843803] env[62627]: _type = "Task" [ 899.843803] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.853602] env[62627]: DEBUG oslo_vmware.api [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d40a63-871a-5c28-be61-a76d2c5576c8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.863763] env[62627]: DEBUG oslo_vmware.api [None req-fbd4e3fb-0e1c-412d-9116-6459271003c6 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194444, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.285582} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.864029] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbd4e3fb-0e1c-412d-9116-6459271003c6 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 899.864287] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-fbd4e3fb-0e1c-412d-9116-6459271003c6 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8] Deleted contents of the VM from datastore datastore1 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 899.864435] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-fbd4e3fb-0e1c-412d-9116-6459271003c6 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 899.864622] env[62627]: INFO nova.compute.manager [None req-fbd4e3fb-0e1c-412d-9116-6459271003c6 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8] Took 1.52 seconds to destroy the instance on the hypervisor. [ 899.864907] env[62627]: DEBUG oslo.service.loopingcall [None req-fbd4e3fb-0e1c-412d-9116-6459271003c6 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 899.865203] env[62627]: DEBUG nova.compute.manager [-] [instance: 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 899.865316] env[62627]: DEBUG nova.network.neutron [-] [instance: 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 899.934592] env[62627]: DEBUG oslo_vmware.api [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Task: {'id': task-2194445, 'name': ReconfigVM_Task, 'duration_secs': 0.404471} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.934958] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Reconfigured VM instance instance-0000002a to attach disk [datastore2] 574293e3-8ec8-410f-8a30-83bc60d25d64/574293e3-8ec8-410f-8a30-83bc60d25d64.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 899.936042] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c7fd71f4-3781-42b6-a8c1-f8da7ba5bdb7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.944997] env[62627]: DEBUG oslo_vmware.api [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Waiting for the task: (returnval){ [ 899.944997] env[62627]: value = "task-2194446" [ 899.944997] env[62627]: _type = "Task" [ 899.944997] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.958690] env[62627]: DEBUG oslo_concurrency.lockutils [req-7a89a8a6-e1f6-4b2d-8701-04bbaa30b2d5 req-57dce54d-81f2-4284-a90a-e22ff4c8eed1 service nova] Releasing lock "refresh_cache-87bc4af9-7cbc-4797-8aaa-cf5f8ba51218" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 899.959222] env[62627]: DEBUG oslo_vmware.api [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Task: {'id': task-2194446, 'name': Rename_Task} progress is 6%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.069910] env[62627]: DEBUG oslo_concurrency.lockutils [None req-67ae8d3c-a51a-44e1-87e6-08c82ce6c4f0 tempest-ServerExternalEventsTest-1806940606 tempest-ServerExternalEventsTest-1806940606-project-member] Lock "5f0240da-bec1-4bf3-a975-6ed3d0630a2d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.795s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 900.192748] env[62627]: DEBUG oslo_concurrency.lockutils [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 900.212321] env[62627]: DEBUG oslo_concurrency.lockutils [None req-bf808392-fe5f-4b0a-b43d-58624ba737b7 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Lock "892fc84b-02a9-4fa0-81b5-80326a060c64" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 88.461s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 900.261321] env[62627]: DEBUG oslo_concurrency.lockutils [None req-906c8ed3-1050-42a5-8029-69cfd3c9d205 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Acquiring lock "02216be1-cdb9-421a-8a23-4a919b376595" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 900.261582] env[62627]: DEBUG oslo_concurrency.lockutils [None req-906c8ed3-1050-42a5-8029-69cfd3c9d205 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Lock "02216be1-cdb9-421a-8a23-4a919b376595" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 900.261788] env[62627]: DEBUG oslo_concurrency.lockutils [None req-906c8ed3-1050-42a5-8029-69cfd3c9d205 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Acquiring lock "02216be1-cdb9-421a-8a23-4a919b376595-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 900.261966] env[62627]: DEBUG oslo_concurrency.lockutils [None req-906c8ed3-1050-42a5-8029-69cfd3c9d205 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Lock "02216be1-cdb9-421a-8a23-4a919b376595-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 900.262197] env[62627]: DEBUG oslo_concurrency.lockutils [None req-906c8ed3-1050-42a5-8029-69cfd3c9d205 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Lock "02216be1-cdb9-421a-8a23-4a919b376595-events" 
"released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 900.264734] env[62627]: INFO nova.compute.manager [None req-906c8ed3-1050-42a5-8029-69cfd3c9d205 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 02216be1-cdb9-421a-8a23-4a919b376595] Terminating instance [ 900.310458] env[62627]: DEBUG oslo_vmware.api [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194443, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.699339} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.310849] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore1] e76bdb57-4a30-4222-ad4c-d95e4e6193cb/e76bdb57-4a30-4222-ad4c-d95e4e6193cb.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 900.310980] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 900.311299] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e288456a-9a23-4f8e-9a0d-58a50e01e5fc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.320872] env[62627]: DEBUG oslo_vmware.api [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the task: (returnval){ [ 900.320872] env[62627]: value = "task-2194447" [ 900.320872] env[62627]: _type = "Task" [ 900.320872] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.326108] env[62627]: DEBUG nova.network.neutron [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] [instance: c9088ac9-456b-4c92-8773-79c263e7cc2a] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 900.334478] env[62627]: DEBUG oslo_vmware.api [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194447, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.358354] env[62627]: DEBUG oslo_vmware.api [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d40a63-871a-5c28-be61-a76d2c5576c8, 'name': SearchDatastore_Task, 'duration_secs': 0.061992} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.358648] env[62627]: DEBUG oslo_concurrency.lockutils [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 900.358935] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore1] 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218/87bc4af9-7cbc-4797-8aaa-cf5f8ba51218.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 900.359301] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-245feb11-9bc8-41d1-a166-eec65ba5f0b2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.365880] env[62627]: DEBUG oslo_vmware.api [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for the task: (returnval){ [ 900.365880] env[62627]: value = "task-2194448" [ 900.365880] env[62627]: _type = "Task" [ 900.365880] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.376054] env[62627]: DEBUG oslo_vmware.api [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194448, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.457783] env[62627]: DEBUG oslo_vmware.api [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Task: {'id': task-2194446, 'name': Rename_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.717383] env[62627]: DEBUG nova.compute.manager [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] [instance: 4e65e39e-3b0b-4c71-87d7-e1ed663ada4a] Starting instance... 
{{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 900.769188] env[62627]: DEBUG nova.compute.manager [None req-906c8ed3-1050-42a5-8029-69cfd3c9d205 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 02216be1-cdb9-421a-8a23-4a919b376595] Start destroying the instance on the hypervisor. {{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 900.769188] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-906c8ed3-1050-42a5-8029-69cfd3c9d205 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 02216be1-cdb9-421a-8a23-4a919b376595] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 900.769785] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-add76069-c335-49cf-b25a-8082e37aec97 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.782875] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-906c8ed3-1050-42a5-8029-69cfd3c9d205 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 02216be1-cdb9-421a-8a23-4a919b376595] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 900.786147] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1ee0dec7-c3f2-4442-b07c-ded91fd0523a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.794193] env[62627]: DEBUG oslo_vmware.api [None req-906c8ed3-1050-42a5-8029-69cfd3c9d205 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Waiting for the task: (returnval){ [ 900.794193] env[62627]: value = "task-2194449" [ 900.794193] env[62627]: _type = "Task" [ 900.794193] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.813622] env[62627]: DEBUG oslo_vmware.api [None req-906c8ed3-1050-42a5-8029-69cfd3c9d205 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194449, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.831809] env[62627]: DEBUG oslo_vmware.api [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194447, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074042} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.836626] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 900.839283] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb33417d-15db-42bb-b2f4-3a61a60c28ef {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.869049] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] Reconfiguring VM instance instance-0000002e to attach disk [datastore1] e76bdb57-4a30-4222-ad4c-d95e4e6193cb/e76bdb57-4a30-4222-ad4c-d95e4e6193cb.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 900.872680] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-68431ece-e183-4ba2-a035-704e4f9c4153 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.898919] env[62627]: DEBUG nova.network.neutron [-] [instance: 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 900.912030] env[62627]: DEBUG oslo_vmware.api [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194448, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.912030] env[62627]: DEBUG oslo_vmware.api [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the task: (returnval){ [ 900.912030] env[62627]: value = "task-2194450" [ 900.912030] env[62627]: _type = "Task" [ 900.912030] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.921408] env[62627]: DEBUG oslo_vmware.api [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194450, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.926573] env[62627]: DEBUG oslo_concurrency.lockutils [None req-dd49e997-61fd-48e6-afd0-481021c08df8 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Acquiring lock "f1b70379-e13d-4007-ac2d-3d2d43cd5602" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 900.926996] env[62627]: DEBUG oslo_concurrency.lockutils [None req-dd49e997-61fd-48e6-afd0-481021c08df8 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Lock "f1b70379-e13d-4007-ac2d-3d2d43cd5602" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 900.927410] env[62627]: DEBUG oslo_concurrency.lockutils [None req-dd49e997-61fd-48e6-afd0-481021c08df8 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Acquiring lock "f1b70379-e13d-4007-ac2d-3d2d43cd5602-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 900.927622] env[62627]: DEBUG oslo_concurrency.lockutils [None req-dd49e997-61fd-48e6-afd0-481021c08df8 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Lock "f1b70379-e13d-4007-ac2d-3d2d43cd5602-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 900.927944] env[62627]: DEBUG oslo_concurrency.lockutils [None req-dd49e997-61fd-48e6-afd0-481021c08df8 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Lock "f1b70379-e13d-4007-ac2d-3d2d43cd5602-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 900.930989] env[62627]: INFO nova.compute.manager [None req-dd49e997-61fd-48e6-afd0-481021c08df8 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: f1b70379-e13d-4007-ac2d-3d2d43cd5602] Terminating instance [ 900.965830] env[62627]: DEBUG oslo_vmware.api [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Task: {'id': task-2194446, 'name': Rename_Task, 'duration_secs': 0.987457} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.966838] env[62627]: DEBUG nova.network.neutron [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] [instance: c9088ac9-456b-4c92-8773-79c263e7cc2a] Updating instance_info_cache with network_info: [{"id": "7c970a80-e9f6-4f52-944e-27b8cc62ad4b", "address": "fa:16:3e:90:3a:fe", "network": {"id": "16be9b2c-9bf7-4e25-9a55-da270442b8f0", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1281567126-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cc28d39b16924be5889b7c027da5f98b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c", "external-id": "nsx-vlan-transportzone-772", "segmentation_id": 772, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c970a80-e9", "ovs_interfaceid": "7c970a80-e9f6-4f52-944e-27b8cc62ad4b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 900.968233] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 900.968510] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-96a1a4a3-af83-4d76-959b-8a07558267f5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.979157] env[62627]: DEBUG oslo_vmware.api [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Waiting for the task: (returnval){ [ 900.979157] env[62627]: value = "task-2194451" [ 900.979157] env[62627]: _type = "Task" [ 900.979157] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.991122] env[62627]: DEBUG oslo_vmware.api [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Task: {'id': task-2194451, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.196578] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9a9bf9f-c1aa-4063-883f-2cbd99ed8ece {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.205918] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfd4f6d5-88d0-4741-948b-7fd9262324e8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.247290] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6925526-c97f-461d-8b54-647aa0712ba6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.250785] env[62627]: DEBUG nova.compute.manager [req-d7bc2501-5f8f-4f1d-b391-f1fde6cd3539 req-a52277bb-d4ea-4091-89cd-201674bf21ac service nova] [instance: 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8] Received event network-vif-deleted-4eb23b03-8a91-47c9-8220-f878ec2488f3 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 901.258543] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f52ba17b-befd-497d-9aca-b79a710fe493 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.276797] env[62627]: DEBUG nova.compute.provider_tree [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 901.279455] env[62627]: DEBUG oslo_concurrency.lockutils [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 901.307242] env[62627]: DEBUG oslo_vmware.api [None req-906c8ed3-1050-42a5-8029-69cfd3c9d205 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194449, 'name': PowerOffVM_Task, 'duration_secs': 0.269682} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.307242] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-906c8ed3-1050-42a5-8029-69cfd3c9d205 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 02216be1-cdb9-421a-8a23-4a919b376595] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 901.307346] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-906c8ed3-1050-42a5-8029-69cfd3c9d205 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 02216be1-cdb9-421a-8a23-4a919b376595] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 901.307635] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-56124386-87b0-48c7-aaaf-4790f11ba5dc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.380194] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-906c8ed3-1050-42a5-8029-69cfd3c9d205 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 02216be1-cdb9-421a-8a23-4a919b376595] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 901.380194] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-906c8ed3-1050-42a5-8029-69cfd3c9d205 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 02216be1-cdb9-421a-8a23-4a919b376595] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 901.380480] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-906c8ed3-1050-42a5-8029-69cfd3c9d205 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Deleting the datastore file [datastore2] 02216be1-cdb9-421a-8a23-4a919b376595 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 901.381633] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b67a285c-669a-4e7c-bc72-e29e04679db1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.386944] env[62627]: DEBUG oslo_vmware.api [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194448, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.741996} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.387452] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore1] 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218/87bc4af9-7cbc-4797-8aaa-cf5f8ba51218.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 901.387717] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 901.388682] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-682ad023-175b-47d8-b82c-abfb6dbf1451 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.392319] env[62627]: DEBUG oslo_vmware.api [None req-906c8ed3-1050-42a5-8029-69cfd3c9d205 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Waiting for the task: (returnval){ [ 901.392319] env[62627]: value = "task-2194453" [ 901.392319] env[62627]: _type = "Task" [ 901.392319] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.397875] env[62627]: DEBUG oslo_vmware.api [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for the task: (returnval){ [ 901.397875] env[62627]: value = "task-2194454" [ 901.397875] env[62627]: _type = "Task" [ 901.397875] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.406866] env[62627]: INFO nova.compute.manager [-] [instance: 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8] Took 1.54 seconds to deallocate network for instance. [ 901.407267] env[62627]: DEBUG oslo_vmware.api [None req-906c8ed3-1050-42a5-8029-69cfd3c9d205 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194453, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.419980] env[62627]: DEBUG oslo_vmware.api [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194454, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.428814] env[62627]: DEBUG oslo_vmware.api [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194450, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.441086] env[62627]: DEBUG nova.compute.manager [None req-dd49e997-61fd-48e6-afd0-481021c08df8 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: f1b70379-e13d-4007-ac2d-3d2d43cd5602] Start destroying the instance on the hypervisor. {{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 901.441086] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-dd49e997-61fd-48e6-afd0-481021c08df8 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: f1b70379-e13d-4007-ac2d-3d2d43cd5602] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 901.441086] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04538093-6d66-45ae-bcb6-fed82288ffd5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.448619] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd49e997-61fd-48e6-afd0-481021c08df8 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: f1b70379-e13d-4007-ac2d-3d2d43cd5602] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 901.449230] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4d66b3c6-fee8-4637-9e3f-9e7f6ba60452 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.457396] env[62627]: DEBUG oslo_vmware.api [None req-dd49e997-61fd-48e6-afd0-481021c08df8 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Waiting for the task: (returnval){ [ 901.457396] env[62627]: value = "task-2194455" [ 901.457396] env[62627]: _type = "Task" [ 901.457396] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.476747] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Releasing lock "refresh_cache-c9088ac9-456b-4c92-8773-79c263e7cc2a" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 901.476747] env[62627]: DEBUG nova.compute.manager [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] [instance: c9088ac9-456b-4c92-8773-79c263e7cc2a] Instance network_info: |[{"id": "7c970a80-e9f6-4f52-944e-27b8cc62ad4b", "address": "fa:16:3e:90:3a:fe", "network": {"id": "16be9b2c-9bf7-4e25-9a55-da270442b8f0", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1281567126-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cc28d39b16924be5889b7c027da5f98b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c", "external-id": "nsx-vlan-transportzone-772", "segmentation_id": 772, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c970a80-e9", "ovs_interfaceid": "7c970a80-e9f6-4f52-944e-27b8cc62ad4b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 901.476747] env[62627]: DEBUG oslo_vmware.api [None req-dd49e997-61fd-48e6-afd0-481021c08df8 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194455, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.476747] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] [instance: c9088ac9-456b-4c92-8773-79c263e7cc2a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:90:3a:fe', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7c970a80-e9f6-4f52-944e-27b8cc62ad4b', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 901.485514] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Creating folder: Project (cc28d39b16924be5889b7c027da5f98b). Parent ref: group-v447541. 
{{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 901.487509] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0ec421c5-8d0f-4a43-9f7f-72179e8c54ba {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.501731] env[62627]: DEBUG oslo_vmware.api [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Task: {'id': task-2194451, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.504358] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Created folder: Project (cc28d39b16924be5889b7c027da5f98b) in parent group-v447541. [ 901.504358] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Creating folder: Instances. Parent ref: group-v447676. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 901.504358] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e96f92e4-8ffc-4cd0-a4d3-13f5799573bf {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.517049] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Created folder: Instances in parent group-v447676. [ 901.517425] env[62627]: DEBUG oslo.service.loopingcall [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 901.517736] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c9088ac9-456b-4c92-8773-79c263e7cc2a] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 901.518244] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c3970766-37f5-4dbf-a99f-0a50f23f25ef {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.548536] env[62627]: DEBUG nova.compute.manager [req-37968747-8747-40fd-ab8b-1f2627822238 req-4fd7111f-da49-49d8-88a2-35602b00a866 service nova] [instance: c9088ac9-456b-4c92-8773-79c263e7cc2a] Received event network-changed-7c970a80-e9f6-4f52-944e-27b8cc62ad4b {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 901.548536] env[62627]: DEBUG nova.compute.manager [req-37968747-8747-40fd-ab8b-1f2627822238 req-4fd7111f-da49-49d8-88a2-35602b00a866 service nova] [instance: c9088ac9-456b-4c92-8773-79c263e7cc2a] Refreshing instance network info cache due to event network-changed-7c970a80-e9f6-4f52-944e-27b8cc62ad4b. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 901.549025] env[62627]: DEBUG oslo_concurrency.lockutils [req-37968747-8747-40fd-ab8b-1f2627822238 req-4fd7111f-da49-49d8-88a2-35602b00a866 service nova] Acquiring lock "refresh_cache-c9088ac9-456b-4c92-8773-79c263e7cc2a" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 901.549025] env[62627]: DEBUG oslo_concurrency.lockutils [req-37968747-8747-40fd-ab8b-1f2627822238 req-4fd7111f-da49-49d8-88a2-35602b00a866 service nova] Acquired lock "refresh_cache-c9088ac9-456b-4c92-8773-79c263e7cc2a" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 901.549339] env[62627]: DEBUG nova.network.neutron [req-37968747-8747-40fd-ab8b-1f2627822238 req-4fd7111f-da49-49d8-88a2-35602b00a866 service nova] [instance: c9088ac9-456b-4c92-8773-79c263e7cc2a] Refreshing network info cache for port 7c970a80-e9f6-4f52-944e-27b8cc62ad4b {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 901.556644] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 901.556644] env[62627]: value = "task-2194458" [ 901.556644] env[62627]: _type = "Task" [ 901.556644] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.570556] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194458, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.780662] env[62627]: DEBUG nova.scheduler.client.report [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 901.908823] env[62627]: DEBUG oslo_vmware.api [None req-906c8ed3-1050-42a5-8029-69cfd3c9d205 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194453, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.302624} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.909884] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-906c8ed3-1050-42a5-8029-69cfd3c9d205 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 901.910742] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-906c8ed3-1050-42a5-8029-69cfd3c9d205 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 02216be1-cdb9-421a-8a23-4a919b376595] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 901.910742] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-906c8ed3-1050-42a5-8029-69cfd3c9d205 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 02216be1-cdb9-421a-8a23-4a919b376595] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 901.910742] env[62627]: INFO nova.compute.manager [None req-906c8ed3-1050-42a5-8029-69cfd3c9d205 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: 02216be1-cdb9-421a-8a23-4a919b376595] Took 1.14 seconds to destroy the instance on the hypervisor. [ 901.911014] env[62627]: DEBUG oslo.service.loopingcall [None req-906c8ed3-1050-42a5-8029-69cfd3c9d205 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 901.914933] env[62627]: DEBUG nova.compute.manager [-] [instance: 02216be1-cdb9-421a-8a23-4a919b376595] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 901.915024] env[62627]: DEBUG nova.network.neutron [-] [instance: 02216be1-cdb9-421a-8a23-4a919b376595] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 901.917410] env[62627]: DEBUG oslo_vmware.api [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194454, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072055} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.921633] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 901.925043] env[62627]: DEBUG oslo_concurrency.lockutils [None req-fbd4e3fb-0e1c-412d-9116-6459271003c6 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 901.926591] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9166d07b-3e41-4907-9eff-d77e9c0f89bd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.937149] env[62627]: DEBUG oslo_vmware.api [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194450, 'name': ReconfigVM_Task, 'duration_secs': 0.645581} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.955027] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] Reconfigured VM instance instance-0000002e to attach disk [datastore1] e76bdb57-4a30-4222-ad4c-d95e4e6193cb/e76bdb57-4a30-4222-ad4c-d95e4e6193cb.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 901.966839] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218] Reconfiguring VM instance instance-0000002f to attach disk [datastore1] 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218/87bc4af9-7cbc-4797-8aaa-cf5f8ba51218.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 901.970495] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8d1d092d-7d60-4328-8aa6-cad4f8bacdf5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.972486] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4c2c518c-3a17-4a4e-8a97-ea6e36c7d2e1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.001291] env[62627]: DEBUG oslo_vmware.api [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the task: (returnval){ [ 902.001291] env[62627]: value = "task-2194459" [ 902.001291] env[62627]: _type = "Task" [ 902.001291] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.013690] env[62627]: DEBUG oslo_vmware.api [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for the task: (returnval){ [ 902.013690] env[62627]: value = "task-2194460" [ 902.013690] env[62627]: _type = "Task" [ 902.013690] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.013963] env[62627]: DEBUG oslo_vmware.api [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Task: {'id': task-2194451, 'name': PowerOnVM_Task, 'duration_secs': 0.56204} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.014195] env[62627]: DEBUG oslo_vmware.api [None req-dd49e997-61fd-48e6-afd0-481021c08df8 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194455, 'name': PowerOffVM_Task, 'duration_secs': 0.25613} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.015021] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 902.015266] env[62627]: INFO nova.compute.manager [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Took 21.61 seconds to spawn the instance on the hypervisor. 
[ 902.015473] env[62627]: DEBUG nova.compute.manager [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 902.015744] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd49e997-61fd-48e6-afd0-481021c08df8 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: f1b70379-e13d-4007-ac2d-3d2d43cd5602] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 902.015918] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-dd49e997-61fd-48e6-afd0-481021c08df8 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: f1b70379-e13d-4007-ac2d-3d2d43cd5602] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 902.019791] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3807439-f0c3-4cbf-8c73-18c7a6b92a9f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.023464] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bebeb438-2ca5-4fa7-a673-c52e299f4c31 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.029344] env[62627]: DEBUG oslo_vmware.api [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194459, 'name': Rename_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.035418] env[62627]: DEBUG oslo_vmware.api [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194460, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.067924] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194458, 'name': CreateVM_Task, 'duration_secs': 0.397013} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.068103] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c9088ac9-456b-4c92-8773-79c263e7cc2a] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 902.068891] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 902.069080] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 902.069443] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 902.069743] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-368dceaf-540c-4e51-bd9c-f1fe9cd1bc8b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.074926] env[62627]: DEBUG oslo_vmware.api [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Waiting for the task: (returnval){ [ 902.074926] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]529f925f-0ec3-9942-6dda-94dde7eee014" [ 902.074926] env[62627]: _type = "Task" [ 902.074926] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.084892] env[62627]: DEBUG oslo_vmware.api [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]529f925f-0ec3-9942-6dda-94dde7eee014, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.095643] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-dd49e997-61fd-48e6-afd0-481021c08df8 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: f1b70379-e13d-4007-ac2d-3d2d43cd5602] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 902.095924] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-dd49e997-61fd-48e6-afd0-481021c08df8 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: f1b70379-e13d-4007-ac2d-3d2d43cd5602] Deleting contents of the VM from datastore datastore1 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 902.096146] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd49e997-61fd-48e6-afd0-481021c08df8 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Deleting the datastore file [datastore1] f1b70379-e13d-4007-ac2d-3d2d43cd5602 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 902.096445] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-024fe82c-6eff-4f81-a4a7-f2cd53bd9eb9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.104513] env[62627]: DEBUG oslo_vmware.api [None req-dd49e997-61fd-48e6-afd0-481021c08df8 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Waiting for the task: (returnval){ [ 902.104513] env[62627]: value = "task-2194462" [ 902.104513] env[62627]: _type = "Task" [ 902.104513] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.113076] env[62627]: DEBUG oslo_vmware.api [None req-dd49e997-61fd-48e6-afd0-481021c08df8 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194462, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.287670] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.755s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 902.288320] env[62627]: DEBUG nova.compute.manager [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] [instance: 7804914c-7df8-45c2-a934-67aa73afb2e2] Start building networks asynchronously for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 902.293123] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.239s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 902.297017] env[62627]: INFO nova.compute.claims [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 902.514147] env[62627]: DEBUG oslo_vmware.api [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194459, 'name': Rename_Task, 'duration_secs': 0.212037} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.514559] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 902.514799] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f23f55aa-ae3e-4303-ad36-cb03d680dbc1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.526899] env[62627]: DEBUG oslo_vmware.api [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194460, 'name': ReconfigVM_Task, 'duration_secs': 0.421545} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.528143] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218] Reconfigured VM instance instance-0000002f to attach disk [datastore1] 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218/87bc4af9-7cbc-4797-8aaa-cf5f8ba51218.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 902.528919] env[62627]: DEBUG oslo_vmware.api [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the task: (returnval){ [ 902.528919] env[62627]: value = "task-2194463" [ 902.528919] env[62627]: _type = "Task" [ 902.528919] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.529132] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-df0069b9-6b4f-4657-90ca-90d12bd2e877 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.544428] env[62627]: DEBUG oslo_vmware.api [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194463, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.550591] env[62627]: DEBUG oslo_vmware.api [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for the task: (returnval){ [ 902.550591] env[62627]: value = "task-2194464" [ 902.550591] env[62627]: _type = "Task" [ 902.550591] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.557099] env[62627]: INFO nova.compute.manager [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Took 62.73 seconds to build instance. [ 902.568409] env[62627]: DEBUG oslo_vmware.api [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194464, 'name': Rename_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.587605] env[62627]: DEBUG oslo_vmware.api [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]529f925f-0ec3-9942-6dda-94dde7eee014, 'name': SearchDatastore_Task, 'duration_secs': 0.014721} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.588230] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 902.588543] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] [instance: c9088ac9-456b-4c92-8773-79c263e7cc2a] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 902.588841] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 902.589189] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 902.589308] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 902.589629] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d20a6fd3-398e-42b8-a786-91a8cfdd10c7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.606335] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 902.606639] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 902.611194] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a1442f3d-56c6-47f3-a48e-b94c125305fb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.620221] env[62627]: DEBUG oslo_vmware.api [None req-dd49e997-61fd-48e6-afd0-481021c08df8 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Task: {'id': task-2194462, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.31575} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.623579] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd49e997-61fd-48e6-afd0-481021c08df8 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 902.623660] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-dd49e997-61fd-48e6-afd0-481021c08df8 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: f1b70379-e13d-4007-ac2d-3d2d43cd5602] Deleted contents of the VM from datastore datastore1 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 902.623845] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-dd49e997-61fd-48e6-afd0-481021c08df8 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: f1b70379-e13d-4007-ac2d-3d2d43cd5602] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 902.623995] env[62627]: INFO nova.compute.manager [None req-dd49e997-61fd-48e6-afd0-481021c08df8 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] [instance: f1b70379-e13d-4007-ac2d-3d2d43cd5602] Took 1.19 seconds to destroy the instance on the hypervisor. [ 902.624282] env[62627]: DEBUG oslo.service.loopingcall [None req-dd49e997-61fd-48e6-afd0-481021c08df8 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 902.624630] env[62627]: DEBUG oslo_vmware.api [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Waiting for the task: (returnval){ [ 902.624630] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]522e41c0-083f-31ca-e6d5-c9a1f6b27380" [ 902.624630] env[62627]: _type = "Task" [ 902.624630] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.624870] env[62627]: DEBUG nova.compute.manager [-] [instance: f1b70379-e13d-4007-ac2d-3d2d43cd5602] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 902.624969] env[62627]: DEBUG nova.network.neutron [-] [instance: f1b70379-e13d-4007-ac2d-3d2d43cd5602] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 902.637830] env[62627]: DEBUG oslo_vmware.api [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]522e41c0-083f-31ca-e6d5-c9a1f6b27380, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.752953] env[62627]: DEBUG nova.network.neutron [req-37968747-8747-40fd-ab8b-1f2627822238 req-4fd7111f-da49-49d8-88a2-35602b00a866 service nova] [instance: c9088ac9-456b-4c92-8773-79c263e7cc2a] Updated VIF entry in instance network info cache for port 7c970a80-e9f6-4f52-944e-27b8cc62ad4b. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 902.752953] env[62627]: DEBUG nova.network.neutron [req-37968747-8747-40fd-ab8b-1f2627822238 req-4fd7111f-da49-49d8-88a2-35602b00a866 service nova] [instance: c9088ac9-456b-4c92-8773-79c263e7cc2a] Updating instance_info_cache with network_info: [{"id": "7c970a80-e9f6-4f52-944e-27b8cc62ad4b", "address": "fa:16:3e:90:3a:fe", "network": {"id": "16be9b2c-9bf7-4e25-9a55-da270442b8f0", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1281567126-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cc28d39b16924be5889b7c027da5f98b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c", "external-id": "nsx-vlan-transportzone-772", "segmentation_id": 772, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c970a80-e9", "ovs_interfaceid": "7c970a80-e9f6-4f52-944e-27b8cc62ad4b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 902.798321] env[62627]: DEBUG nova.compute.utils [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 902.803976] env[62627]: DEBUG nova.compute.manager [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] [instance: 7804914c-7df8-45c2-a934-67aa73afb2e2] Not allocating networking since 'none' was specified. 
{{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 902.924960] env[62627]: DEBUG nova.network.neutron [-] [instance: 02216be1-cdb9-421a-8a23-4a919b376595] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 903.046228] env[62627]: DEBUG oslo_vmware.api [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194463, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.060033] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cd842f98-9835-46e0-92ed-af56d2ea1450 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Lock "574293e3-8ec8-410f-8a30-83bc60d25d64" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 112.305s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 903.068656] env[62627]: DEBUG oslo_vmware.api [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194464, 'name': Rename_Task, 'duration_secs': 0.145916} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.069819] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 903.071042] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6d5a2b8e-abc9-4cf4-948a-105c2c0db4cf {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.081398] env[62627]: DEBUG oslo_vmware.api [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for the task: (returnval){ [ 903.081398] env[62627]: value = "task-2194465" [ 903.081398] env[62627]: _type = "Task" [ 903.081398] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.096795] env[62627]: DEBUG oslo_vmware.api [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194465, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.143953] env[62627]: DEBUG oslo_vmware.api [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]522e41c0-083f-31ca-e6d5-c9a1f6b27380, 'name': SearchDatastore_Task, 'duration_secs': 0.017323} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.147163] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-057b715b-5713-493d-82e8-d9afcc917cea {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.154244] env[62627]: DEBUG oslo_vmware.api [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Waiting for the task: (returnval){ [ 903.154244] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52fb2f18-cf1c-4e72-fea5-07771244356f" [ 903.154244] env[62627]: _type = "Task" [ 903.154244] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.164320] env[62627]: DEBUG oslo_vmware.api [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52fb2f18-cf1c-4e72-fea5-07771244356f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.258611] env[62627]: DEBUG oslo_concurrency.lockutils [req-37968747-8747-40fd-ab8b-1f2627822238 req-4fd7111f-da49-49d8-88a2-35602b00a866 service nova] Releasing lock "refresh_cache-c9088ac9-456b-4c92-8773-79c263e7cc2a" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 903.308055] env[62627]: DEBUG nova.compute.manager [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] [instance: 7804914c-7df8-45c2-a934-67aa73afb2e2] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 903.428179] env[62627]: DEBUG nova.compute.manager [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Stashing vm_state: active {{(pid=62627) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 903.430110] env[62627]: INFO nova.compute.manager [-] [instance: 02216be1-cdb9-421a-8a23-4a919b376595] Took 1.52 seconds to deallocate network for instance. 
[ 903.445417] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Acquiring lock "ae4ec050-5062-4e57-8090-f67f8c8013ff" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 903.445653] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Lock "ae4ec050-5062-4e57-8090-f67f8c8013ff" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 903.549710] env[62627]: DEBUG oslo_vmware.api [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194463, 'name': PowerOnVM_Task, 'duration_secs': 0.80897} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.550219] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 903.550510] env[62627]: INFO nova.compute.manager [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] Took 12.16 seconds to spawn the instance on the hypervisor. [ 903.551587] env[62627]: DEBUG nova.compute.manager [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 903.553529] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8478e726-5a54-4c6f-9792-1c31b1002f7a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.560524] env[62627]: DEBUG nova.network.neutron [-] [instance: f1b70379-e13d-4007-ac2d-3d2d43cd5602] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 903.569688] env[62627]: DEBUG nova.compute.manager [None req-32ef16b3-e927-432e-9947-b14ab10d70c1 tempest-ServersListShow296Test-1206191576 tempest-ServersListShow296Test-1206191576-project-member] [instance: 582ebeb1-e916-45f7-b569-96ae3a7acda5] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 903.594655] env[62627]: DEBUG oslo_vmware.api [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194465, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.612779] env[62627]: DEBUG oslo_concurrency.lockutils [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquiring lock "d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 903.613022] env[62627]: DEBUG oslo_concurrency.lockutils [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lock "d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 903.667506] env[62627]: DEBUG oslo_vmware.api [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52fb2f18-cf1c-4e72-fea5-07771244356f, 'name': SearchDatastore_Task, 'duration_secs': 0.028481} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.668101] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 903.668869] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore1] c9088ac9-456b-4c92-8773-79c263e7cc2a/c9088ac9-456b-4c92-8773-79c263e7cc2a.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 903.668869] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2d85ac3e-d5e6-438b-955c-fbfbe2d85ac2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.682874] env[62627]: DEBUG oslo_vmware.api [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Waiting for the task: (returnval){ [ 903.682874] env[62627]: value = "task-2194466" [ 903.682874] env[62627]: _type = "Task" [ 903.682874] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.695318] env[62627]: DEBUG oslo_vmware.api [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Task: {'id': task-2194466, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.783494] env[62627]: DEBUG nova.compute.manager [req-cea6cf02-b0a2-4d7d-9db5-f6261c6f67e6 req-57d53a4c-4ab8-4df8-a899-609f0eb3ef0a service nova] [instance: 02216be1-cdb9-421a-8a23-4a919b376595] Received event network-vif-deleted-40b19e0d-048f-4bca-bfea-412ad98b0568 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 903.783709] env[62627]: DEBUG nova.compute.manager [req-cea6cf02-b0a2-4d7d-9db5-f6261c6f67e6 req-57d53a4c-4ab8-4df8-a899-609f0eb3ef0a service nova] [instance: f1b70379-e13d-4007-ac2d-3d2d43cd5602] Received event network-vif-deleted-93a8b363-e41a-4ac2-abf1-274395673a5b {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 903.944340] env[62627]: DEBUG oslo_concurrency.lockutils [None req-906c8ed3-1050-42a5-8029-69cfd3c9d205 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 903.950322] env[62627]: DEBUG oslo_concurrency.lockutils [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 903.952016] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7fed8bc-f40f-4a31-9fd6-3d8277a4eaf5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.963943] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72c1ee00-fea7-4c41-8c4e-c32f558f4abd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.000956] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07818b21-08ac-4726-bb04-556b269fc9b1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.008357] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03658469-ded0-425e-b2ad-3d49f3ed092f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.024777] env[62627]: DEBUG nova.compute.provider_tree [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 904.065190] env[62627]: INFO nova.compute.manager [-] [instance: 
f1b70379-e13d-4007-ac2d-3d2d43cd5602] Took 1.44 seconds to deallocate network for instance. [ 904.081078] env[62627]: DEBUG nova.compute.manager [None req-32ef16b3-e927-432e-9947-b14ab10d70c1 tempest-ServersListShow296Test-1206191576 tempest-ServersListShow296Test-1206191576-project-member] [instance: 582ebeb1-e916-45f7-b569-96ae3a7acda5] Instance disappeared before build. {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2446}} [ 904.089168] env[62627]: INFO nova.compute.manager [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] Took 48.62 seconds to build instance. [ 904.103877] env[62627]: DEBUG oslo_vmware.api [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194465, 'name': PowerOnVM_Task, 'duration_secs': 0.51439} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.103877] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 904.103877] env[62627]: INFO nova.compute.manager [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218] Took 10.00 seconds to spawn the instance on the hypervisor. [ 904.103877] env[62627]: DEBUG nova.compute.manager [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 904.104360] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41fea506-a984-4d35-a59e-9749b218f722 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.195256] env[62627]: DEBUG oslo_vmware.api [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Task: {'id': task-2194466, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.329724] env[62627]: DEBUG nova.compute.manager [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] [instance: 7804914c-7df8-45c2-a934-67aa73afb2e2] Start spawning the instance on the hypervisor. 
{{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 904.384179] env[62627]: DEBUG nova.virt.hardware [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 904.384861] env[62627]: DEBUG nova.virt.hardware [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 904.384861] env[62627]: DEBUG nova.virt.hardware [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 904.384861] env[62627]: DEBUG nova.virt.hardware [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 904.385255] env[62627]: DEBUG nova.virt.hardware [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 904.385255] env[62627]: DEBUG nova.virt.hardware [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 904.385502] env[62627]: DEBUG nova.virt.hardware [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 904.385743] env[62627]: DEBUG nova.virt.hardware [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 904.385947] env[62627]: DEBUG nova.virt.hardware [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 
tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 904.386220] env[62627]: DEBUG nova.virt.hardware [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 904.386401] env[62627]: DEBUG nova.virt.hardware [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 904.387369] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df377a45-9af4-4e5c-b8d6-947ded448469 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.391684] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a714923c-39ce-44fd-beff-e8a2003abba2 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Acquiring lock "574293e3-8ec8-410f-8a30-83bc60d25d64" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 904.391936] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a714923c-39ce-44fd-beff-e8a2003abba2 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Lock "574293e3-8ec8-410f-8a30-83bc60d25d64" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 904.392180] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a714923c-39ce-44fd-beff-e8a2003abba2 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Acquiring lock "574293e3-8ec8-410f-8a30-83bc60d25d64-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 904.392408] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a714923c-39ce-44fd-beff-e8a2003abba2 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Lock "574293e3-8ec8-410f-8a30-83bc60d25d64-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 904.392598] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a714923c-39ce-44fd-beff-e8a2003abba2 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Lock "574293e3-8ec8-410f-8a30-83bc60d25d64-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 904.395665] env[62627]: INFO nova.compute.manager [None req-a714923c-39ce-44fd-beff-e8a2003abba2 
tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Terminating instance [ 904.403568] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2208fd82-cc1e-44c9-b437-5241c1fc028f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.426142] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] [instance: 7804914c-7df8-45c2-a934-67aa73afb2e2] Instance VIF info [] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 904.433602] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Creating folder: Project (d8b16483b2f142198ea759b729ef6011). Parent ref: group-v447541. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 904.434466] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c5b65d11-4a2c-473b-a7d6-6eb9321a9066 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.453714] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Created folder: Project (d8b16483b2f142198ea759b729ef6011) in parent group-v447541. [ 904.453714] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Creating folder: Instances. Parent ref: group-v447679. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 904.454535] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d56087ce-1260-4a0e-86b1-e195c5c3c34e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.468912] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Created folder: Instances in parent group-v447679. [ 904.469198] env[62627]: DEBUG oslo.service.loopingcall [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 904.469464] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7804914c-7df8-45c2-a934-67aa73afb2e2] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 904.469708] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b339de4d-bb5c-4fc1-9618-dd21b3742553 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.491394] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 904.491394] env[62627]: value = "task-2194469" [ 904.491394] env[62627]: _type = "Task" [ 904.491394] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.502502] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194469, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.528037] env[62627]: DEBUG nova.scheduler.client.report [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 904.588352] env[62627]: DEBUG oslo_concurrency.lockutils [None req-dd49e997-61fd-48e6-afd0-481021c08df8 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 904.598656] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b51765bb-0272-4324-9976-aeb779899da9 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Lock "e76bdb57-4a30-4222-ad4c-d95e4e6193cb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 91.708s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 904.607890] env[62627]: DEBUG oslo_concurrency.lockutils [None req-32ef16b3-e927-432e-9947-b14ab10d70c1 tempest-ServersListShow296Test-1206191576 tempest-ServersListShow296Test-1206191576-project-member] Lock "582ebeb1-e916-45f7-b569-96ae3a7acda5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.907s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 904.624914] env[62627]: INFO nova.compute.manager [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218] Took 38.04 seconds to build instance. 
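Two patterns recur throughout the trace above and below. First, the "Acquiring lock X by Y", "Lock X acquired by Y :: waited Ns" and "Lock X "released" by Y :: held Ns" lines are emitted by oslo.concurrency's lockutils (the inner wrapper at lockutils.py:402/407/421 in the logged paths). A minimal sketch of that usage follows; it uses the real lockutils API, but the function body and lock handling shown are illustrative placeholders, not Nova's resource-tracker code:

    from oslo_concurrency import lockutils

    @lockutils.synchronized("compute_resources")
    def update_usage():
        # While this runs, competing callers log "Acquiring lock" and block;
        # lockutils logs the "waited"/"held" durations seen in the trace.
        pass

    # Equivalent context-manager form for an ad-hoc critical section:
    with lockutils.lock("compute_resources"):
        pass

Second, the "Waiting for the task: (returnval){ value = "task-..." }" lines followed by "Task: {...} progress is N%" and eventually "completed successfully" come from oslo.vmware's wait_for_task/_poll_task pair (api.py:397/434 in the logged paths). Below is a simplified, illustrative polling loop only, not the oslo.vmware source; get_task_info and poll_interval are hypothetical names introduced for the sketch:

    import time

    def wait_for_task(get_task_info, task_ref, poll_interval=0.5):
        """Poll a vCenter task until it reaches a terminal state (sketch only)."""
        while True:
            info = get_task_info(task_ref)            # hypothetical: fetch TaskInfo
            if info["state"] == "success":            # -> "... completed successfully."
                return info
            if info["state"] == "error":              # -> the wait raises instead
                raise RuntimeError(info.get("error", "task failed"))
            # -> "Task: {...} progress is N%."
            print("Task %s progress is %s%%" % (task_ref, info.get("progress", 0)))
            time.sleep(poll_interval)

This is why each task id in the log (task-2194465 through task-2194479) appears first in a "Waiting for the task" line and again once the corresponding vCenter operation (CopyVirtualDisk_Task, CreateVM_Task, PowerOnVM_Task, ...) finishes.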
[ 904.696032] env[62627]: DEBUG oslo_vmware.api [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Task: {'id': task-2194466, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.74388} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.696158] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore1] c9088ac9-456b-4c92-8773-79c263e7cc2a/c9088ac9-456b-4c92-8773-79c263e7cc2a.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 904.696512] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] [instance: c9088ac9-456b-4c92-8773-79c263e7cc2a] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 904.696634] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0a6c7eb9-45ee-4a9c-aa7f-4c5e936db522 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.706562] env[62627]: DEBUG oslo_vmware.api [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Waiting for the task: (returnval){ [ 904.706562] env[62627]: value = "task-2194470" [ 904.706562] env[62627]: _type = "Task" [ 904.706562] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.716846] env[62627]: DEBUG oslo_vmware.api [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Task: {'id': task-2194470, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.909851] env[62627]: DEBUG nova.compute.manager [None req-a714923c-39ce-44fd-beff-e8a2003abba2 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Start destroying the instance on the hypervisor. 
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 904.910098] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-a714923c-39ce-44fd-beff-e8a2003abba2 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 904.911054] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26eb8d6e-74d9-4ed2-bde5-e26590f91c51 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.920679] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-a714923c-39ce-44fd-beff-e8a2003abba2 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 904.921525] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1f056111-0b9c-444c-ae40-2c49713fb2e9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.934430] env[62627]: DEBUG oslo_vmware.api [None req-a714923c-39ce-44fd-beff-e8a2003abba2 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Waiting for the task: (returnval){ [ 904.934430] env[62627]: value = "task-2194471" [ 904.934430] env[62627]: _type = "Task" [ 904.934430] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.949018] env[62627]: DEBUG oslo_vmware.api [None req-a714923c-39ce-44fd-beff-e8a2003abba2 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Task: {'id': task-2194471, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.002769] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194469, 'name': CreateVM_Task, 'duration_secs': 0.350596} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.002949] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7804914c-7df8-45c2-a934-67aa73afb2e2] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 905.003412] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 905.003562] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 905.003890] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 905.004161] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-03d37cfc-bdcd-4eb4-8536-58eb9a1c8d29 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.009990] env[62627]: DEBUG oslo_vmware.api [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Waiting for the task: (returnval){ [ 905.009990] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52408ed3-770a-9481-ef4e-9ae3c2d9d674" [ 905.009990] env[62627]: _type = "Task" [ 905.009990] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.021761] env[62627]: DEBUG oslo_vmware.api [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52408ed3-770a-9481-ef4e-9ae3c2d9d674, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.034921] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.743s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 905.035558] env[62627]: DEBUG nova.compute.manager [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] Start building networks asynchronously for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 905.039748] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c6ae5c6a-8ece-482b-bb89-2f6bb3f66ec8 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.017s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 905.039976] env[62627]: DEBUG nova.objects.instance [None req-c6ae5c6a-8ece-482b-bb89-2f6bb3f66ec8 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Lazy-loading 'resources' on Instance uuid 7691c2e2-4e43-4990-bf4d-2ef3298a4264 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 905.104032] env[62627]: DEBUG nova.compute.manager [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] [instance: c560b907-500b-42fd-851b-61a04472b660] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 905.111210] env[62627]: DEBUG nova.compute.manager [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: 5d288606-02b6-4d11-b285-7325e48fb5b3] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 905.128391] env[62627]: DEBUG oslo_concurrency.lockutils [None req-fccbd0e8-51b9-4701-82ce-b34b917d1e79 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Lock "87bc4af9-7cbc-4797-8aaa-cf5f8ba51218" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 89.089s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 905.218533] env[62627]: DEBUG oslo_vmware.api [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Task: {'id': task-2194470, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077463} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.219418] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] [instance: c9088ac9-456b-4c92-8773-79c263e7cc2a] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 905.220324] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78eb150c-5360-46be-b0b0-916e7f058913 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.246138] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] [instance: c9088ac9-456b-4c92-8773-79c263e7cc2a] Reconfiguring VM instance instance-00000030 to attach disk [datastore1] c9088ac9-456b-4c92-8773-79c263e7cc2a/c9088ac9-456b-4c92-8773-79c263e7cc2a.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 905.246910] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f2a784e5-b498-48b7-b3c2-137ffd8535e1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.272058] env[62627]: DEBUG oslo_vmware.api [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Waiting for the task: (returnval){ [ 905.272058] env[62627]: value = "task-2194472" [ 905.272058] env[62627]: _type = "Task" [ 905.272058] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.284058] env[62627]: DEBUG oslo_vmware.api [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Task: {'id': task-2194472, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.447237] env[62627]: DEBUG oslo_vmware.api [None req-a714923c-39ce-44fd-beff-e8a2003abba2 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Task: {'id': task-2194471, 'name': PowerOffVM_Task, 'duration_secs': 0.259717} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.447612] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-a714923c-39ce-44fd-beff-e8a2003abba2 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 905.447811] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-a714923c-39ce-44fd-beff-e8a2003abba2 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 905.448155] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a85af75d-505f-4127-8fda-63d46a1c47f0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.522158] env[62627]: DEBUG oslo_vmware.api [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52408ed3-770a-9481-ef4e-9ae3c2d9d674, 'name': SearchDatastore_Task, 'duration_secs': 0.020214} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.522445] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 905.522697] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] [instance: 7804914c-7df8-45c2-a934-67aa73afb2e2] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 905.522927] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 905.523086] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 905.523292] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 905.523601] 
env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ad35ddaa-dc51-4a6f-b4f2-6618cd52d8da {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.539317] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 905.539519] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 905.540391] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d9f4c2b8-1afd-45ee-a636-f2dfa1b92de1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.544153] env[62627]: DEBUG nova.compute.utils [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 905.550133] env[62627]: DEBUG nova.compute.manager [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] Allocating IP information in the background. {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 905.550328] env[62627]: DEBUG nova.network.neutron [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 905.557112] env[62627]: DEBUG oslo_vmware.api [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Waiting for the task: (returnval){ [ 905.557112] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52048da0-9e3c-7f9b-add7-034f79a80557" [ 905.557112] env[62627]: _type = "Task" [ 905.557112] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.569837] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de621e4e-c19e-46ca-b014-066b0c04cad2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.580203] env[62627]: DEBUG oslo_vmware.api [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52048da0-9e3c-7f9b-add7-034f79a80557, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.587401] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e57cd8f8-71b6-42c6-9e20-f4fbc7a9bc4a tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] Suspending the VM {{(pid=62627) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 905.587676] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-fb85137d-4f3e-400c-af7d-1e97765432cc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.597034] env[62627]: DEBUG oslo_vmware.api [None req-e57cd8f8-71b6-42c6-9e20-f4fbc7a9bc4a tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the task: (returnval){ [ 905.597034] env[62627]: value = "task-2194474" [ 905.597034] env[62627]: _type = "Task" [ 905.597034] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.608104] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-a714923c-39ce-44fd-beff-e8a2003abba2 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 905.608104] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-a714923c-39ce-44fd-beff-e8a2003abba2 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 905.608348] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-a714923c-39ce-44fd-beff-e8a2003abba2 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Deleting the datastore file [datastore2] 574293e3-8ec8-410f-8a30-83bc60d25d64 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 905.611975] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f95d4628-597f-4352-920b-8cdea709bedd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.624575] env[62627]: DEBUG oslo_vmware.api [None req-e57cd8f8-71b6-42c6-9e20-f4fbc7a9bc4a tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194474, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.630908] env[62627]: DEBUG nova.policy [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1253a87d708448aba8d97fcabe137853', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6c9263b20f714274a55b88a8bdbd5251', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 905.633224] env[62627]: DEBUG nova.compute.manager [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] [instance: ae4ec050-5062-4e57-8090-f67f8c8013ff] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 905.642165] env[62627]: DEBUG oslo_vmware.api [None req-a714923c-39ce-44fd-beff-e8a2003abba2 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Waiting for the task: (returnval){ [ 905.642165] env[62627]: value = "task-2194475" [ 905.642165] env[62627]: _type = "Task" [ 905.642165] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.645909] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 905.651243] env[62627]: DEBUG oslo_concurrency.lockutils [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 905.659136] env[62627]: DEBUG oslo_vmware.api [None req-a714923c-39ce-44fd-beff-e8a2003abba2 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Task: {'id': task-2194475, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.783357] env[62627]: DEBUG oslo_vmware.api [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Task: {'id': task-2194472, 'name': ReconfigVM_Task, 'duration_secs': 0.300875} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.787051] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] [instance: c9088ac9-456b-4c92-8773-79c263e7cc2a] Reconfigured VM instance instance-00000030 to attach disk [datastore1] c9088ac9-456b-4c92-8773-79c263e7cc2a/c9088ac9-456b-4c92-8773-79c263e7cc2a.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 905.788051] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-54a52c09-af92-42dc-937d-e75e852bbdcb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.798653] env[62627]: DEBUG oslo_vmware.api [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Waiting for the task: (returnval){ [ 905.798653] env[62627]: value = "task-2194476" [ 905.798653] env[62627]: _type = "Task" [ 905.798653] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.808742] env[62627]: DEBUG oslo_vmware.api [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Task: {'id': task-2194476, 'name': Rename_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.052905] env[62627]: DEBUG nova.compute.manager [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 906.072816] env[62627]: DEBUG oslo_vmware.api [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52048da0-9e3c-7f9b-add7-034f79a80557, 'name': SearchDatastore_Task, 'duration_secs': 0.021569} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.073714] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-82ee6114-ac4d-40ae-92e6-24b21b8263be {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.084365] env[62627]: DEBUG oslo_vmware.api [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Waiting for the task: (returnval){ [ 906.084365] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52550c9b-d995-dfbf-e308-53e133810fba" [ 906.084365] env[62627]: _type = "Task" [ 906.084365] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.097243] env[62627]: DEBUG oslo_vmware.api [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52550c9b-d995-dfbf-e308-53e133810fba, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.110764] env[62627]: DEBUG oslo_vmware.api [None req-e57cd8f8-71b6-42c6-9e20-f4fbc7a9bc4a tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194474, 'name': SuspendVM_Task} progress is 100%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.160334] env[62627]: DEBUG oslo_vmware.api [None req-a714923c-39ce-44fd-beff-e8a2003abba2 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Task: {'id': task-2194475, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.171666] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 906.179008] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd6a97ae-c5a2-4f8a-87b4-c8218fe0f16a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.188882] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06d17140-20ea-4501-99b1-2648f4b4a2c2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.232561] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-607a30de-e7ed-422e-ba7b-4b8216b24df5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.242596] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a304f2a2-6dc8-4a13-a352-4776b72e8b0c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.261070] env[62627]: DEBUG nova.compute.provider_tree [None req-c6ae5c6a-8ece-482b-bb89-2f6bb3f66ec8 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 906.313027] env[62627]: DEBUG oslo_vmware.api [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Task: {'id': task-2194476, 'name': Rename_Task} progress is 14%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.394979] env[62627]: DEBUG oslo_vmware.rw_handles [None req-8f99e995-067a-40e6-bf58-99b5ee477e13 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52954380-d016-feee-0a09-87a006acf023/disk-0.vmdk. {{(pid=62627) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 906.395915] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9edbbd79-5e96-4e22-842c-d45637c347b3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.403559] env[62627]: DEBUG oslo_vmware.rw_handles [None req-8f99e995-067a-40e6-bf58-99b5ee477e13 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52954380-d016-feee-0a09-87a006acf023/disk-0.vmdk is in state: ready. {{(pid=62627) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 906.403743] env[62627]: ERROR oslo_vmware.rw_handles [None req-8f99e995-067a-40e6-bf58-99b5ee477e13 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52954380-d016-feee-0a09-87a006acf023/disk-0.vmdk due to incomplete transfer. [ 906.403973] env[62627]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-aeb7df63-e2ed-42af-8a86-24689fa3d457 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.412707] env[62627]: DEBUG oslo_vmware.rw_handles [None req-8f99e995-067a-40e6-bf58-99b5ee477e13 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52954380-d016-feee-0a09-87a006acf023/disk-0.vmdk. 
{{(pid=62627) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 906.412920] env[62627]: DEBUG nova.virt.vmwareapi.images [None req-8f99e995-067a-40e6-bf58-99b5ee477e13 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Uploaded image 62effbcb-0336-416d-b766-b7800c022151 to the Glance image server {{(pid=62627) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 906.415795] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f99e995-067a-40e6-bf58-99b5ee477e13 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Destroying the VM {{(pid=62627) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 906.417326] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-b6d974cc-143f-46f6-812d-a4375a51146d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.421764] env[62627]: DEBUG oslo_concurrency.lockutils [None req-93272dc4-b586-4565-9de4-229a9c87127c tempest-ServersAdminTestJSON-2035493786 tempest-ServersAdminTestJSON-2035493786-project-admin] Acquiring lock "refresh_cache-87bc4af9-7cbc-4797-8aaa-cf5f8ba51218" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 906.421764] env[62627]: DEBUG oslo_concurrency.lockutils [None req-93272dc4-b586-4565-9de4-229a9c87127c tempest-ServersAdminTestJSON-2035493786 tempest-ServersAdminTestJSON-2035493786-project-admin] Acquired lock "refresh_cache-87bc4af9-7cbc-4797-8aaa-cf5f8ba51218" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 906.421764] env[62627]: DEBUG nova.network.neutron [None req-93272dc4-b586-4565-9de4-229a9c87127c tempest-ServersAdminTestJSON-2035493786 tempest-ServersAdminTestJSON-2035493786-project-admin] [instance: 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 906.431336] env[62627]: DEBUG oslo_vmware.api [None req-8f99e995-067a-40e6-bf58-99b5ee477e13 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Waiting for the task: (returnval){ [ 906.431336] env[62627]: value = "task-2194477" [ 906.431336] env[62627]: _type = "Task" [ 906.431336] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.442794] env[62627]: DEBUG oslo_vmware.api [None req-8f99e995-067a-40e6-bf58-99b5ee477e13 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194477, 'name': Destroy_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.460176] env[62627]: DEBUG nova.network.neutron [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] Successfully created port: 8163fefd-89a3-4ff5-bbc9-e1561f91bc68 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 906.599733] env[62627]: DEBUG oslo_vmware.api [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52550c9b-d995-dfbf-e308-53e133810fba, 'name': SearchDatastore_Task, 'duration_secs': 0.024085} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.599733] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 906.600066] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore1] 7804914c-7df8-45c2-a934-67aa73afb2e2/7804914c-7df8-45c2-a934-67aa73afb2e2.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 906.600425] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-73eeeb7a-27c7-4164-a476-591e600cb41a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.615367] env[62627]: DEBUG oslo_vmware.api [None req-e57cd8f8-71b6-42c6-9e20-f4fbc7a9bc4a tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194474, 'name': SuspendVM_Task, 'duration_secs': 0.869024} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.617528] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e57cd8f8-71b6-42c6-9e20-f4fbc7a9bc4a tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] Suspended the VM {{(pid=62627) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 906.618242] env[62627]: DEBUG nova.compute.manager [None req-e57cd8f8-71b6-42c6-9e20-f4fbc7a9bc4a tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 906.619055] env[62627]: DEBUG oslo_vmware.api [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Waiting for the task: (returnval){ [ 906.619055] env[62627]: value = "task-2194478" [ 906.619055] env[62627]: _type = "Task" [ 906.619055] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.619920] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39157f6f-4709-483c-b99b-a5222ef766d5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.634980] env[62627]: DEBUG oslo_vmware.api [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Task: {'id': task-2194478, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.664023] env[62627]: DEBUG oslo_vmware.api [None req-a714923c-39ce-44fd-beff-e8a2003abba2 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Task: {'id': task-2194475, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.580927} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.664490] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-a714923c-39ce-44fd-beff-e8a2003abba2 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 906.664808] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-a714923c-39ce-44fd-beff-e8a2003abba2 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 906.665053] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-a714923c-39ce-44fd-beff-e8a2003abba2 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 906.665448] env[62627]: INFO nova.compute.manager [None req-a714923c-39ce-44fd-beff-e8a2003abba2 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Took 1.76 seconds to destroy the instance on the hypervisor. [ 906.665770] env[62627]: DEBUG oslo.service.loopingcall [None req-a714923c-39ce-44fd-beff-e8a2003abba2 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 906.666072] env[62627]: DEBUG nova.compute.manager [-] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 906.666201] env[62627]: DEBUG nova.network.neutron [-] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 906.767892] env[62627]: DEBUG nova.scheduler.client.report [None req-c6ae5c6a-8ece-482b-bb89-2f6bb3f66ec8 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 906.812162] env[62627]: DEBUG oslo_vmware.api [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Task: {'id': task-2194476, 'name': Rename_Task, 'duration_secs': 0.887001} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.812705] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] [instance: c9088ac9-456b-4c92-8773-79c263e7cc2a] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 906.813531] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8d445c51-131c-4006-80a4-89bd6c58a149 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.824828] env[62627]: DEBUG oslo_vmware.api [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Waiting for the task: (returnval){ [ 906.824828] env[62627]: value = "task-2194479" [ 906.824828] env[62627]: _type = "Task" [ 906.824828] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.836820] env[62627]: DEBUG oslo_vmware.api [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Task: {'id': task-2194479, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.942729] env[62627]: DEBUG oslo_vmware.api [None req-8f99e995-067a-40e6-bf58-99b5ee477e13 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194477, 'name': Destroy_Task} progress is 33%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.065433] env[62627]: DEBUG nova.compute.manager [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] Start spawning the instance on the hypervisor. 
{{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 907.095327] env[62627]: DEBUG nova.virt.hardware [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 907.095731] env[62627]: DEBUG nova.virt.hardware [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 907.096287] env[62627]: DEBUG nova.virt.hardware [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 907.096287] env[62627]: DEBUG nova.virt.hardware [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 907.096287] env[62627]: DEBUG nova.virt.hardware [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 907.096287] env[62627]: DEBUG nova.virt.hardware [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 907.096552] env[62627]: DEBUG nova.virt.hardware [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 907.096644] env[62627]: DEBUG nova.virt.hardware [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 907.096822] env[62627]: DEBUG nova.virt.hardware [None 
req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 907.097030] env[62627]: DEBUG nova.virt.hardware [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 907.098341] env[62627]: DEBUG nova.virt.hardware [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 907.098341] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b83e587d-14ed-48bb-ab8b-6aa2e1c2174e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.113024] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-315a3ad2-a8ef-445a-9b38-3a0fe76323da {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.140047] env[62627]: DEBUG oslo_vmware.api [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Task: {'id': task-2194478, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.228305] env[62627]: DEBUG nova.network.neutron [None req-93272dc4-b586-4565-9de4-229a9c87127c tempest-ServersAdminTestJSON-2035493786 tempest-ServersAdminTestJSON-2035493786-project-admin] [instance: 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218] Updating instance_info_cache with network_info: [{"id": "dca7e38e-83c7-4679-824f-f98d9fc325ad", "address": "fa:16:3e:7c:d9:61", "network": {"id": "4e8b6953-adb2-4977-8315-179e01e807e0", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1952492535-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14238d8125674b63a7cf4452cb260283", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2907cce-d529-4809-af05-d29397bed211", "external-id": "nsx-vlan-transportzone-427", "segmentation_id": 427, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdca7e38e-83", "ovs_interfaceid": "dca7e38e-83c7-4679-824f-f98d9fc325ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 907.274421] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c6ae5c6a-8ece-482b-bb89-2f6bb3f66ec8 
tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.234s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 907.279463] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f8acef66-d09b-4cf5-a130-55c74245bf6a tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.844s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 907.279767] env[62627]: DEBUG nova.objects.instance [None req-f8acef66-d09b-4cf5-a130-55c74245bf6a tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Lazy-loading 'resources' on Instance uuid d28285ed-2ec0-43a4-bd34-82d33816fea4 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 907.314926] env[62627]: INFO nova.scheduler.client.report [None req-c6ae5c6a-8ece-482b-bb89-2f6bb3f66ec8 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Deleted allocations for instance 7691c2e2-4e43-4990-bf4d-2ef3298a4264 [ 907.338181] env[62627]: DEBUG oslo_vmware.api [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Task: {'id': task-2194479, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.442491] env[62627]: DEBUG oslo_vmware.api [None req-8f99e995-067a-40e6-bf58-99b5ee477e13 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194477, 'name': Destroy_Task, 'duration_secs': 0.715935} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.442763] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-8f99e995-067a-40e6-bf58-99b5ee477e13 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Destroyed the VM [ 907.443083] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8f99e995-067a-40e6-bf58-99b5ee477e13 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Deleting Snapshot of the VM instance {{(pid=62627) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 907.443363] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-d2cefff6-c2eb-4b5c-ae18-0fec9b0019da {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.452819] env[62627]: DEBUG oslo_vmware.api [None req-8f99e995-067a-40e6-bf58-99b5ee477e13 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Waiting for the task: (returnval){ [ 907.452819] env[62627]: value = "task-2194480" [ 907.452819] env[62627]: _type = "Task" [ 907.452819] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.466875] env[62627]: DEBUG oslo_vmware.api [None req-8f99e995-067a-40e6-bf58-99b5ee477e13 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194480, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.642991] env[62627]: DEBUG oslo_vmware.api [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Task: {'id': task-2194478, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.713854} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.643300] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore1] 7804914c-7df8-45c2-a934-67aa73afb2e2/7804914c-7df8-45c2-a934-67aa73afb2e2.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 907.643493] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] [instance: 7804914c-7df8-45c2-a934-67aa73afb2e2] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 907.643755] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2fd704bc-ca26-492e-bb9d-cb006827d781 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.655146] env[62627]: DEBUG oslo_vmware.api [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Waiting for the task: (returnval){ [ 907.655146] env[62627]: value = "task-2194481" [ 907.655146] env[62627]: _type = "Task" [ 907.655146] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.670144] env[62627]: DEBUG oslo_vmware.api [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Task: {'id': task-2194481, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.731272] env[62627]: DEBUG oslo_concurrency.lockutils [None req-93272dc4-b586-4565-9de4-229a9c87127c tempest-ServersAdminTestJSON-2035493786 tempest-ServersAdminTestJSON-2035493786-project-admin] Releasing lock "refresh_cache-87bc4af9-7cbc-4797-8aaa-cf5f8ba51218" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 907.731595] env[62627]: DEBUG nova.compute.manager [None req-93272dc4-b586-4565-9de4-229a9c87127c tempest-ServersAdminTestJSON-2035493786 tempest-ServersAdminTestJSON-2035493786-project-admin] [instance: 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218] Inject network info {{(pid=62627) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 907.734702] env[62627]: DEBUG nova.compute.manager [None req-93272dc4-b586-4565-9de4-229a9c87127c tempest-ServersAdminTestJSON-2035493786 tempest-ServersAdminTestJSON-2035493786-project-admin] [instance: 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218] network_info to inject: |[{"id": "dca7e38e-83c7-4679-824f-f98d9fc325ad", "address": "fa:16:3e:7c:d9:61", "network": {"id": "4e8b6953-adb2-4977-8315-179e01e807e0", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1952492535-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14238d8125674b63a7cf4452cb260283", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2907cce-d529-4809-af05-d29397bed211", "external-id": "nsx-vlan-transportzone-427", "segmentation_id": 427, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdca7e38e-83", "ovs_interfaceid": "dca7e38e-83c7-4679-824f-f98d9fc325ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 907.737653] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-93272dc4-b586-4565-9de4-229a9c87127c tempest-ServersAdminTestJSON-2035493786 tempest-ServersAdminTestJSON-2035493786-project-admin] [instance: 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218] Reconfiguring VM instance to set the machine id {{(pid=62627) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 907.737954] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ac018f67-b9ed-4558-917f-d01ebf049a6b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.756878] env[62627]: DEBUG oslo_vmware.api [None req-93272dc4-b586-4565-9de4-229a9c87127c tempest-ServersAdminTestJSON-2035493786 tempest-ServersAdminTestJSON-2035493786-project-admin] Waiting for the task: (returnval){ [ 907.756878] env[62627]: value = "task-2194482" [ 907.756878] env[62627]: _type = "Task" [ 907.756878] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.766446] env[62627]: DEBUG oslo_vmware.api [None req-93272dc4-b586-4565-9de4-229a9c87127c tempest-ServersAdminTestJSON-2035493786 tempest-ServersAdminTestJSON-2035493786-project-admin] Task: {'id': task-2194482, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.822230] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c6ae5c6a-8ece-482b-bb89-2f6bb3f66ec8 tempest-ServersTestJSON-774440597 tempest-ServersTestJSON-774440597-project-member] Lock "7691c2e2-4e43-4990-bf4d-2ef3298a4264" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.924s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 907.840067] env[62627]: DEBUG oslo_vmware.api [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Task: {'id': task-2194479, 'name': PowerOnVM_Task, 'duration_secs': 0.712055} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.840960] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] [instance: c9088ac9-456b-4c92-8773-79c263e7cc2a] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 907.841196] env[62627]: INFO nova.compute.manager [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] [instance: c9088ac9-456b-4c92-8773-79c263e7cc2a] Took 10.98 seconds to spawn the instance on the hypervisor. [ 907.841379] env[62627]: DEBUG nova.compute.manager [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] [instance: c9088ac9-456b-4c92-8773-79c263e7cc2a] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 907.842227] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d544f5a9-9966-44c9-b374-3bf88d05d15d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.964810] env[62627]: DEBUG oslo_vmware.api [None req-8f99e995-067a-40e6-bf58-99b5ee477e13 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194480, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.065218] env[62627]: DEBUG nova.compute.manager [req-c9b116fd-5add-4b08-af32-0cc61b0b79ac req-8ef85a41-67a4-4345-9a42-5c1b69163d3e service nova] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Received event network-vif-deleted-e8a543dc-e58e-4c74-a763-b795643a2d53 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 908.065370] env[62627]: INFO nova.compute.manager [req-c9b116fd-5add-4b08-af32-0cc61b0b79ac req-8ef85a41-67a4-4345-9a42-5c1b69163d3e service nova] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Neutron deleted interface e8a543dc-e58e-4c74-a763-b795643a2d53; detaching it from the instance and deleting it from the info cache [ 908.069022] env[62627]: DEBUG nova.network.neutron [req-c9b116fd-5add-4b08-af32-0cc61b0b79ac req-8ef85a41-67a4-4345-9a42-5c1b69163d3e service nova] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Updating instance_info_cache with network_info: [{"id": "6bf6cde4-6fe7-4f05-a8ed-b57e621b565b", "address": "fa:16:3e:65:58:4e", "network": {"id": "a2b42bd2-d4d5-4d9a-9f84-0a6c9158114f", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-988988055", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.140", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecf9c58481c14634b2315248118ed1b6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ecc4615-18f0-4324-8e16-5e5d513325e2", "external-id": "nsx-vlan-transportzone-167", "segmentation_id": 167, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6bf6cde4-6f", "ovs_interfaceid": "6bf6cde4-6fe7-4f05-a8ed-b57e621b565b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e9aeb5b9-eeec-406e-a7d0-6207531ec459", "address": "fa:16:3e:1c:d1:bb", "network": {"id": "3f097461-afdd-4fac-a30a-555f765c38e6", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1093830906", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.241", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "ecf9c58481c14634b2315248118ed1b6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb18870e-f482-4c7b-8cd4-5c933d3ad294", "external-id": "nsx-vlan-transportzone-76", "segmentation_id": 76, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape9aeb5b9-ee", "ovs_interfaceid": "e9aeb5b9-eeec-406e-a7d0-6207531ec459", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 908.166180] env[62627]: DEBUG oslo_vmware.api [None 
req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Task: {'id': task-2194481, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073401} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.166180] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] [instance: 7804914c-7df8-45c2-a934-67aa73afb2e2] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 908.167916] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fca129f-0def-4fe8-bd28-185010a21554 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.196554] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] [instance: 7804914c-7df8-45c2-a934-67aa73afb2e2] Reconfiguring VM instance instance-00000031 to attach disk [datastore1] 7804914c-7df8-45c2-a934-67aa73afb2e2/7804914c-7df8-45c2-a934-67aa73afb2e2.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 908.202141] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a1f59196-02c1-4bfc-b699-b0bb3d1ea718 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.223731] env[62627]: DEBUG oslo_vmware.api [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Waiting for the task: (returnval){ [ 908.223731] env[62627]: value = "task-2194483" [ 908.223731] env[62627]: _type = "Task" [ 908.223731] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.237542] env[62627]: DEBUG oslo_vmware.api [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Task: {'id': task-2194483, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.267746] env[62627]: DEBUG oslo_vmware.api [None req-93272dc4-b586-4565-9de4-229a9c87127c tempest-ServersAdminTestJSON-2035493786 tempest-ServersAdminTestJSON-2035493786-project-admin] Task: {'id': task-2194482, 'name': ReconfigVM_Task, 'duration_secs': 0.220097} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.270452] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-93272dc4-b586-4565-9de4-229a9c87127c tempest-ServersAdminTestJSON-2035493786 tempest-ServersAdminTestJSON-2035493786-project-admin] [instance: 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218] Reconfigured VM instance to set the machine id {{(pid=62627) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 908.370561] env[62627]: INFO nova.compute.manager [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] [instance: c9088ac9-456b-4c92-8773-79c263e7cc2a] Took 39.49 seconds to build instance. [ 908.400783] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e175f236-b499-4351-acb6-b68909efa08c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.410709] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f062bbd-f388-46cf-8c11-7600fc53aecb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.450159] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05fb870b-388d-4a94-ab10-3110e67cda1a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.462936] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-206fcc4d-e79f-4724-b513-4874d5d2c9c7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.473137] env[62627]: DEBUG oslo_vmware.api [None req-8f99e995-067a-40e6-bf58-99b5ee477e13 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194480, 'name': RemoveSnapshot_Task, 'duration_secs': 0.558756} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.473914] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8f99e995-067a-40e6-bf58-99b5ee477e13 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Deleted Snapshot of the VM instance {{(pid=62627) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 908.474284] env[62627]: INFO nova.compute.manager [None req-8f99e995-067a-40e6-bf58-99b5ee477e13 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Took 17.48 seconds to snapshot the instance on the hypervisor. 
[ 908.486407] env[62627]: DEBUG nova.compute.provider_tree [None req-f8acef66-d09b-4cf5-a130-55c74245bf6a tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 908.574922] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7b03a230-88f7-459e-a5ac-f7b9899ad82f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.585951] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6c904a7-21e3-49fd-938f-de5bc65c9c60 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.626092] env[62627]: DEBUG nova.compute.manager [req-c9b116fd-5add-4b08-af32-0cc61b0b79ac req-8ef85a41-67a4-4345-9a42-5c1b69163d3e service nova] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Detach interface failed, port_id=e8a543dc-e58e-4c74-a763-b795643a2d53, reason: Instance 574293e3-8ec8-410f-8a30-83bc60d25d64 could not be found. {{(pid=62627) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 908.724964] env[62627]: DEBUG nova.network.neutron [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] Successfully updated port: 8163fefd-89a3-4ff5-bbc9-e1561f91bc68 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 908.741349] env[62627]: DEBUG oslo_vmware.api [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Task: {'id': task-2194483, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.871413] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c0a5fdc9-c024-4ab2-8f1b-684df03dd9c0 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Lock "c9088ac9-456b-4c92-8773-79c263e7cc2a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 86.141s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 908.881356] env[62627]: DEBUG nova.compute.manager [req-208426f5-9a8d-45f4-9020-9893dbd04ea9 req-4c22c2e7-1c42-4835-8253-ef91af6a4569 service nova] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] Received event network-vif-plugged-8163fefd-89a3-4ff5-bbc9-e1561f91bc68 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 908.881356] env[62627]: DEBUG oslo_concurrency.lockutils [req-208426f5-9a8d-45f4-9020-9893dbd04ea9 req-4c22c2e7-1c42-4835-8253-ef91af6a4569 service nova] Acquiring lock "d0786233-087b-4000-8c37-fd5c55cc0ef4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 908.881356] env[62627]: DEBUG oslo_concurrency.lockutils [req-208426f5-9a8d-45f4-9020-9893dbd04ea9 req-4c22c2e7-1c42-4835-8253-ef91af6a4569 service nova] Lock "d0786233-087b-4000-8c37-fd5c55cc0ef4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 908.881356] env[62627]: DEBUG oslo_concurrency.lockutils [req-208426f5-9a8d-45f4-9020-9893dbd04ea9 req-4c22c2e7-1c42-4835-8253-ef91af6a4569 service nova] Lock "d0786233-087b-4000-8c37-fd5c55cc0ef4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 908.881356] env[62627]: DEBUG nova.compute.manager [req-208426f5-9a8d-45f4-9020-9893dbd04ea9 req-4c22c2e7-1c42-4835-8253-ef91af6a4569 service nova] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] No waiting events found dispatching network-vif-plugged-8163fefd-89a3-4ff5-bbc9-e1561f91bc68 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 908.881356] env[62627]: WARNING nova.compute.manager [req-208426f5-9a8d-45f4-9020-9893dbd04ea9 req-4c22c2e7-1c42-4835-8253-ef91af6a4569 service nova] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] Received unexpected event network-vif-plugged-8163fefd-89a3-4ff5-bbc9-e1561f91bc68 for instance with vm_state building and task_state spawning. 
[ 908.993317] env[62627]: DEBUG nova.scheduler.client.report [None req-f8acef66-d09b-4cf5-a130-55c74245bf6a tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 909.102448] env[62627]: DEBUG nova.network.neutron [-] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 909.231868] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquiring lock "refresh_cache-d0786233-087b-4000-8c37-fd5c55cc0ef4" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 909.232087] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquired lock "refresh_cache-d0786233-087b-4000-8c37-fd5c55cc0ef4" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 909.232257] env[62627]: DEBUG nova.network.neutron [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 909.240578] env[62627]: DEBUG oslo_vmware.api [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Task: {'id': task-2194483, 'name': ReconfigVM_Task, 'duration_secs': 0.840258} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.241149] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] [instance: 7804914c-7df8-45c2-a934-67aa73afb2e2] Reconfigured VM instance instance-00000031 to attach disk [datastore1] 7804914c-7df8-45c2-a934-67aa73afb2e2/7804914c-7df8-45c2-a934-67aa73afb2e2.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 909.241770] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a772d791-f143-45a1-8acf-dd7fec32f644 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.253325] env[62627]: DEBUG oslo_vmware.api [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Waiting for the task: (returnval){ [ 909.253325] env[62627]: value = "task-2194484" [ 909.253325] env[62627]: _type = "Task" [ 909.253325] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.265430] env[62627]: DEBUG oslo_vmware.api [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Task: {'id': task-2194484, 'name': Rename_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.376815] env[62627]: DEBUG nova.compute.manager [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Starting instance... 
{{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 909.501139] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f8acef66-d09b-4cf5-a130-55c74245bf6a tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.220s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 909.504252] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c78c57c4-3f40-4788-a7ae-32d7a8a29f92 tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.528s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 909.507815] env[62627]: DEBUG nova.objects.instance [None req-c78c57c4-3f40-4788-a7ae-32d7a8a29f92 tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Lazy-loading 'resources' on Instance uuid faa16f10-63f0-457c-8cd7-20a624454d86 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 909.537488] env[62627]: INFO nova.scheduler.client.report [None req-f8acef66-d09b-4cf5-a130-55c74245bf6a tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Deleted allocations for instance d28285ed-2ec0-43a4-bd34-82d33816fea4 [ 909.605401] env[62627]: INFO nova.compute.manager [-] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Took 2.94 seconds to deallocate network for instance. [ 909.767141] env[62627]: DEBUG oslo_vmware.api [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Task: {'id': task-2194484, 'name': Rename_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.804857] env[62627]: DEBUG nova.network.neutron [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] Instance cache missing network info. 
{{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 909.811715] env[62627]: DEBUG nova.compute.manager [None req-bb3fe9f1-7a2b-4927-9609-df489b9d7bcf tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 909.812370] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30dc4309-5718-4a71-8a2b-e76a6b141e4a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.910261] env[62627]: DEBUG oslo_concurrency.lockutils [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 910.048221] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f8acef66-d09b-4cf5-a130-55c74245bf6a tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Lock "d28285ed-2ec0-43a4-bd34-82d33816fea4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.299s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 910.080162] env[62627]: DEBUG nova.network.neutron [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] Updating instance_info_cache with network_info: [{"id": "8163fefd-89a3-4ff5-bbc9-e1561f91bc68", "address": "fa:16:3e:03:fc:a6", "network": {"id": "7c6d5a35-da58-4a44-83d0-20feb994e5ab", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-709440062-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c9263b20f714274a55b88a8bdbd5251", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "99be9a5e-b3f9-4e6c-83d5-df11f817847d", "external-id": "nsx-vlan-transportzone-566", "segmentation_id": 566, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8163fefd-89", "ovs_interfaceid": "8163fefd-89a3-4ff5-bbc9-e1561f91bc68", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 910.116958] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a714923c-39ce-44fd-beff-e8a2003abba2 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 910.270205] 
env[62627]: DEBUG oslo_vmware.api [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Task: {'id': task-2194484, 'name': Rename_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.328782] env[62627]: INFO nova.compute.manager [None req-bb3fe9f1-7a2b-4927-9609-df489b9d7bcf tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] instance snapshotting [ 910.328990] env[62627]: WARNING nova.compute.manager [None req-bb3fe9f1-7a2b-4927-9609-df489b9d7bcf tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] trying to snapshot a non-running instance: (state: 7 expected: 1) [ 910.333694] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5acb72f1-5bb6-4875-9a5d-e919b3e90ca1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.358012] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-630f317a-7863-4c44-911a-b840eb872ab1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.368160] env[62627]: DEBUG oslo_concurrency.lockutils [None req-984eae37-fe79-4d36-9fa0-d8596dd2f782 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Acquiring lock "c9088ac9-456b-4c92-8773-79c263e7cc2a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 910.368405] env[62627]: DEBUG oslo_concurrency.lockutils [None req-984eae37-fe79-4d36-9fa0-d8596dd2f782 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Lock "c9088ac9-456b-4c92-8773-79c263e7cc2a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 910.368604] env[62627]: DEBUG oslo_concurrency.lockutils [None req-984eae37-fe79-4d36-9fa0-d8596dd2f782 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Acquiring lock "c9088ac9-456b-4c92-8773-79c263e7cc2a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 910.368782] env[62627]: DEBUG oslo_concurrency.lockutils [None req-984eae37-fe79-4d36-9fa0-d8596dd2f782 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Lock "c9088ac9-456b-4c92-8773-79c263e7cc2a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 910.368948] env[62627]: DEBUG oslo_concurrency.lockutils [None req-984eae37-fe79-4d36-9fa0-d8596dd2f782 tempest-ServerAddressesNegativeTestJSON-609171485 
tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Lock "c9088ac9-456b-4c92-8773-79c263e7cc2a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 910.371394] env[62627]: INFO nova.compute.manager [None req-984eae37-fe79-4d36-9fa0-d8596dd2f782 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] [instance: c9088ac9-456b-4c92-8773-79c263e7cc2a] Terminating instance [ 910.538579] env[62627]: DEBUG nova.compute.manager [req-e9beb7a8-deb6-408c-bfe2-6581d24d7a09 req-bb15f88b-a4ea-41e1-b412-404b3437dacb service nova] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Received event network-vif-deleted-e9aeb5b9-eeec-406e-a7d0-6207531ec459 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 910.538579] env[62627]: DEBUG nova.compute.manager [req-e9beb7a8-deb6-408c-bfe2-6581d24d7a09 req-bb15f88b-a4ea-41e1-b412-404b3437dacb service nova] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Received event network-vif-deleted-6bf6cde4-6fe7-4f05-a8ed-b57e621b565b {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 910.583139] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Releasing lock "refresh_cache-d0786233-087b-4000-8c37-fd5c55cc0ef4" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 910.583536] env[62627]: DEBUG nova.compute.manager [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] Instance network_info: |[{"id": "8163fefd-89a3-4ff5-bbc9-e1561f91bc68", "address": "fa:16:3e:03:fc:a6", "network": {"id": "7c6d5a35-da58-4a44-83d0-20feb994e5ab", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-709440062-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c9263b20f714274a55b88a8bdbd5251", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "99be9a5e-b3f9-4e6c-83d5-df11f817847d", "external-id": "nsx-vlan-transportzone-566", "segmentation_id": 566, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8163fefd-89", "ovs_interfaceid": "8163fefd-89a3-4ff5-bbc9-e1561f91bc68", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 910.584171] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] Instance VIF info [{'network_name': 'br-int', 
'mac_address': 'fa:16:3e:03:fc:a6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '99be9a5e-b3f9-4e6c-83d5-df11f817847d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8163fefd-89a3-4ff5-bbc9-e1561f91bc68', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 910.597019] env[62627]: DEBUG oslo.service.loopingcall [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 910.597019] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 910.597739] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7c1c1ec-d5e7-46db-a7c5-2b8fe00f9193 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.600900] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-138af200-4c1d-449b-bd95-0b5712cfa451 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.624602] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3256c168-9e19-43c3-88b9-014b79a353fe {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.629128] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 910.629128] env[62627]: value = "task-2194485" [ 910.629128] env[62627]: _type = "Task" [ 910.629128] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.664212] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8416dba1-3f1c-430a-9233-97f95a9750aa {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.674166] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e66188f4-f02d-47b2-ba04-9b799704c0f7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.683087] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194485, 'name': CreateVM_Task} progress is 15%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.691249] env[62627]: DEBUG nova.compute.provider_tree [None req-c78c57c4-3f40-4788-a7ae-32d7a8a29f92 tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 910.766435] env[62627]: DEBUG oslo_vmware.api [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Task: {'id': task-2194484, 'name': Rename_Task, 'duration_secs': 1.329917} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.766738] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] [instance: 7804914c-7df8-45c2-a934-67aa73afb2e2] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 910.766907] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-56647395-50eb-4d7f-9dc4-3264f9907083 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.774835] env[62627]: DEBUG oslo_vmware.api [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Waiting for the task: (returnval){ [ 910.774835] env[62627]: value = "task-2194486" [ 910.774835] env[62627]: _type = "Task" [ 910.774835] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.783558] env[62627]: DEBUG oslo_vmware.api [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Task: {'id': task-2194486, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.871882] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-bb3fe9f1-7a2b-4927-9609-df489b9d7bcf tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] Creating Snapshot of the VM instance {{(pid=62627) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 910.872331] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-ee2df6ce-172e-4610-a08d-4dafea1db3bc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.878513] env[62627]: DEBUG nova.compute.manager [None req-984eae37-fe79-4d36-9fa0-d8596dd2f782 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] [instance: c9088ac9-456b-4c92-8773-79c263e7cc2a] Start destroying the instance on the hypervisor. {{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 910.878680] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-984eae37-fe79-4d36-9fa0-d8596dd2f782 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] [instance: c9088ac9-456b-4c92-8773-79c263e7cc2a] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 910.879698] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8256b38-13f2-4c36-b515-5fe0c80b0984 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.885869] env[62627]: DEBUG oslo_vmware.api [None req-bb3fe9f1-7a2b-4927-9609-df489b9d7bcf tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the task: (returnval){ [ 910.885869] env[62627]: value = "task-2194487" [ 910.885869] env[62627]: _type = "Task" [ 910.885869] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.890486] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-984eae37-fe79-4d36-9fa0-d8596dd2f782 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] [instance: c9088ac9-456b-4c92-8773-79c263e7cc2a] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 910.891110] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1c297858-1a19-4e5d-957a-1c034407a9f6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.897410] env[62627]: DEBUG oslo_vmware.api [None req-bb3fe9f1-7a2b-4927-9609-df489b9d7bcf tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194487, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.899159] env[62627]: DEBUG oslo_vmware.api [None req-984eae37-fe79-4d36-9fa0-d8596dd2f782 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Waiting for the task: (returnval){ [ 910.899159] env[62627]: value = "task-2194488" [ 910.899159] env[62627]: _type = "Task" [ 910.899159] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.909033] env[62627]: DEBUG oslo_vmware.api [None req-984eae37-fe79-4d36-9fa0-d8596dd2f782 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Task: {'id': task-2194488, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.143601] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194485, 'name': CreateVM_Task, 'duration_secs': 0.459452} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.143763] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 911.144634] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 911.144798] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 911.145184] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 911.145476] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-52a77bac-e4ac-4bc8-ab0a-56e3c36308dd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.151447] env[62627]: DEBUG oslo_vmware.api [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 911.151447] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5223a608-c76c-d129-3ff3-76e3bd2284dc" [ 911.151447] env[62627]: _type = "Task" [ 911.151447] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.163270] env[62627]: DEBUG oslo_vmware.api [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5223a608-c76c-d129-3ff3-76e3bd2284dc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.196604] env[62627]: DEBUG nova.scheduler.client.report [None req-c78c57c4-3f40-4788-a7ae-32d7a8a29f92 tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 911.289471] env[62627]: DEBUG oslo_vmware.api [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Task: {'id': task-2194486, 'name': PowerOnVM_Task, 'duration_secs': 0.514067} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.289540] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] [instance: 7804914c-7df8-45c2-a934-67aa73afb2e2] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 911.290026] env[62627]: INFO nova.compute.manager [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] [instance: 7804914c-7df8-45c2-a934-67aa73afb2e2] Took 6.96 seconds to spawn the instance on the hypervisor. [ 911.290026] env[62627]: DEBUG nova.compute.manager [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] [instance: 7804914c-7df8-45c2-a934-67aa73afb2e2] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 911.290802] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2832259-a393-4665-9a85-64f233404fdd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.403765] env[62627]: DEBUG oslo_vmware.api [None req-bb3fe9f1-7a2b-4927-9609-df489b9d7bcf tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194487, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.417175] env[62627]: DEBUG oslo_vmware.api [None req-984eae37-fe79-4d36-9fa0-d8596dd2f782 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Task: {'id': task-2194488, 'name': PowerOffVM_Task, 'duration_secs': 0.201081} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.417548] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-984eae37-fe79-4d36-9fa0-d8596dd2f782 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] [instance: c9088ac9-456b-4c92-8773-79c263e7cc2a] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 911.417770] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-984eae37-fe79-4d36-9fa0-d8596dd2f782 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] [instance: c9088ac9-456b-4c92-8773-79c263e7cc2a] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 911.418190] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-104a1a6e-01e5-4e07-bb19-265825c336af {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.484271] env[62627]: DEBUG nova.compute.manager [req-e29a4550-70aa-41c1-b1ec-fd0002972c3e req-7d5f76e6-66ef-4bcc-a7bc-0254eb255d8b service nova] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] Received event network-changed-8163fefd-89a3-4ff5-bbc9-e1561f91bc68 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 911.484600] env[62627]: DEBUG nova.compute.manager [req-e29a4550-70aa-41c1-b1ec-fd0002972c3e req-7d5f76e6-66ef-4bcc-a7bc-0254eb255d8b service nova] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] Refreshing instance network info cache due to event network-changed-8163fefd-89a3-4ff5-bbc9-e1561f91bc68. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 911.484746] env[62627]: DEBUG oslo_concurrency.lockutils [req-e29a4550-70aa-41c1-b1ec-fd0002972c3e req-7d5f76e6-66ef-4bcc-a7bc-0254eb255d8b service nova] Acquiring lock "refresh_cache-d0786233-087b-4000-8c37-fd5c55cc0ef4" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 911.484908] env[62627]: DEBUG oslo_concurrency.lockutils [req-e29a4550-70aa-41c1-b1ec-fd0002972c3e req-7d5f76e6-66ef-4bcc-a7bc-0254eb255d8b service nova] Acquired lock "refresh_cache-d0786233-087b-4000-8c37-fd5c55cc0ef4" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 911.489056] env[62627]: DEBUG nova.network.neutron [req-e29a4550-70aa-41c1-b1ec-fd0002972c3e req-7d5f76e6-66ef-4bcc-a7bc-0254eb255d8b service nova] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] Refreshing network info cache for port 8163fefd-89a3-4ff5-bbc9-e1561f91bc68 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 911.664017] env[62627]: DEBUG oslo_vmware.api [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5223a608-c76c-d129-3ff3-76e3bd2284dc, 'name': SearchDatastore_Task, 'duration_secs': 0.011409} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.664298] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 911.664604] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 911.664842] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 911.664990] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 911.665186] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 911.665749] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6149dde1-b21c-4862-ba81-1c979854b2d1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.675036] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 911.675227] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 911.676136] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea2047ae-4e70-492a-935d-7f13f46dd96c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.682157] env[62627]: DEBUG oslo_vmware.api [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 911.682157] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52972e3e-6831-9b94-a364-61e9edf341be" [ 911.682157] env[62627]: _type = "Task" [ 911.682157] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.691988] env[62627]: DEBUG oslo_vmware.api [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52972e3e-6831-9b94-a364-61e9edf341be, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.702078] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c78c57c4-3f40-4788-a7ae-32d7a8a29f92 tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.198s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 911.707043] env[62627]: DEBUG oslo_concurrency.lockutils [None req-560d8d6b-edca-4ab2-af38-da0c63ca679e tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.978s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 911.707743] env[62627]: DEBUG nova.objects.instance [None req-560d8d6b-edca-4ab2-af38-da0c63ca679e tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lazy-loading 'resources' on Instance uuid a12855e2-1544-4e85-a7ff-c026fec861b6 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 911.739708] env[62627]: INFO nova.scheduler.client.report [None req-c78c57c4-3f40-4788-a7ae-32d7a8a29f92 tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Deleted allocations for instance faa16f10-63f0-457c-8cd7-20a624454d86 [ 911.811515] env[62627]: INFO nova.compute.manager [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] [instance: 7804914c-7df8-45c2-a934-67aa73afb2e2] Took 36.41 seconds to build instance. [ 911.901719] env[62627]: DEBUG oslo_vmware.api [None req-bb3fe9f1-7a2b-4927-9609-df489b9d7bcf tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194487, 'name': CreateSnapshot_Task, 'duration_secs': 0.553638} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.902196] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-bb3fe9f1-7a2b-4927-9609-df489b9d7bcf tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] Created Snapshot of the VM instance {{(pid=62627) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 911.902958] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04cf0e6a-e3b1-46bb-a659-809aad6727d9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.197400] env[62627]: DEBUG oslo_vmware.api [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52972e3e-6831-9b94-a364-61e9edf341be, 'name': SearchDatastore_Task, 'duration_secs': 0.010352} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.198876] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e2586147-78d5-45c7-97e8-b171650d6358 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.204424] env[62627]: DEBUG oslo_vmware.api [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 912.204424] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52a186b2-b1c8-c5ee-b034-314eb1623b7d" [ 912.204424] env[62627]: _type = "Task" [ 912.204424] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.220281] env[62627]: DEBUG oslo_vmware.api [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52a186b2-b1c8-c5ee-b034-314eb1623b7d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.251652] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c78c57c4-3f40-4788-a7ae-32d7a8a29f92 tempest-AttachInterfacesUnderV243Test-1545049479 tempest-AttachInterfacesUnderV243Test-1545049479-project-member] Lock "faa16f10-63f0-457c-8cd7-20a624454d86" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.788s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 912.313800] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0c412b15-7517-49b1-93d9-8c7eec2f10b9 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Lock "7804914c-7df8-45c2-a934-67aa73afb2e2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 67.232s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 912.322867] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-984eae37-fe79-4d36-9fa0-d8596dd2f782 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] [instance: c9088ac9-456b-4c92-8773-79c263e7cc2a] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 912.322955] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-984eae37-fe79-4d36-9fa0-d8596dd2f782 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] [instance: c9088ac9-456b-4c92-8773-79c263e7cc2a] Deleting contents of the VM from datastore datastore1 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 912.323126] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-984eae37-fe79-4d36-9fa0-d8596dd2f782 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Deleting the datastore file [datastore1] c9088ac9-456b-4c92-8773-79c263e7cc2a {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 912.323410] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c53cffef-e6b0-417d-8a12-3f38679a609c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.340865] env[62627]: DEBUG oslo_vmware.api [None req-984eae37-fe79-4d36-9fa0-d8596dd2f782 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Waiting for the task: (returnval){ [ 912.340865] env[62627]: value = "task-2194490" [ 912.340865] env[62627]: _type = "Task" [ 912.340865] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.357964] env[62627]: DEBUG oslo_vmware.api [None req-984eae37-fe79-4d36-9fa0-d8596dd2f782 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Task: {'id': task-2194490, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.409098] env[62627]: DEBUG nova.network.neutron [req-e29a4550-70aa-41c1-b1ec-fd0002972c3e req-7d5f76e6-66ef-4bcc-a7bc-0254eb255d8b service nova] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] Updated VIF entry in instance network info cache for port 8163fefd-89a3-4ff5-bbc9-e1561f91bc68. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 912.410244] env[62627]: DEBUG nova.network.neutron [req-e29a4550-70aa-41c1-b1ec-fd0002972c3e req-7d5f76e6-66ef-4bcc-a7bc-0254eb255d8b service nova] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] Updating instance_info_cache with network_info: [{"id": "8163fefd-89a3-4ff5-bbc9-e1561f91bc68", "address": "fa:16:3e:03:fc:a6", "network": {"id": "7c6d5a35-da58-4a44-83d0-20feb994e5ab", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-709440062-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c9263b20f714274a55b88a8bdbd5251", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "99be9a5e-b3f9-4e6c-83d5-df11f817847d", "external-id": "nsx-vlan-transportzone-566", "segmentation_id": 566, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8163fefd-89", "ovs_interfaceid": "8163fefd-89a3-4ff5-bbc9-e1561f91bc68", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 912.423496] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-bb3fe9f1-7a2b-4927-9609-df489b9d7bcf tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] Creating linked-clone VM from snapshot {{(pid=62627) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 912.424623] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-5f169a21-0047-4825-8e6e-d657c5586558 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.434936] env[62627]: DEBUG oslo_vmware.api [None req-bb3fe9f1-7a2b-4927-9609-df489b9d7bcf tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the task: (returnval){ [ 912.434936] env[62627]: value = "task-2194491" [ 912.434936] env[62627]: _type = "Task" [ 912.434936] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.448603] env[62627]: DEBUG oslo_vmware.api [None req-bb3fe9f1-7a2b-4927-9609-df489b9d7bcf tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194491, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.718137] env[62627]: DEBUG oslo_vmware.api [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52a186b2-b1c8-c5ee-b034-314eb1623b7d, 'name': SearchDatastore_Task, 'duration_secs': 0.025929} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.720990] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 912.721545] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] d0786233-087b-4000-8c37-fd5c55cc0ef4/d0786233-087b-4000-8c37-fd5c55cc0ef4.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 912.722099] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-11c55840-105a-4913-ac3e-d64f693f69e0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.751167] env[62627]: DEBUG oslo_vmware.api [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 912.751167] env[62627]: value = "task-2194492" [ 912.751167] env[62627]: _type = "Task" [ 912.751167] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.761301] env[62627]: DEBUG oslo_vmware.api [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194492, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.784701] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Acquiring lock "8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 912.785138] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Lock "8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 912.827179] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a377da0-42c8-4dc4-9339-cd09aed6ce86 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.838888] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0135888a-628c-4b78-bb7c-220c1a640595 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.874632] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ff97f95-99bc-4c37-9776-686311a794e5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.879835] env[62627]: DEBUG oslo_vmware.api [None req-984eae37-fe79-4d36-9fa0-d8596dd2f782 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Task: {'id': task-2194490, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.146897} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.880503] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-984eae37-fe79-4d36-9fa0-d8596dd2f782 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 912.880731] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-984eae37-fe79-4d36-9fa0-d8596dd2f782 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] [instance: c9088ac9-456b-4c92-8773-79c263e7cc2a] Deleted contents of the VM from datastore datastore1 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 912.880896] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-984eae37-fe79-4d36-9fa0-d8596dd2f782 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] [instance: c9088ac9-456b-4c92-8773-79c263e7cc2a] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 912.881093] env[62627]: INFO nova.compute.manager [None req-984eae37-fe79-4d36-9fa0-d8596dd2f782 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] [instance: c9088ac9-456b-4c92-8773-79c263e7cc2a] Took 2.00 seconds to destroy the instance on the hypervisor. [ 912.881360] env[62627]: DEBUG oslo.service.loopingcall [None req-984eae37-fe79-4d36-9fa0-d8596dd2f782 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 912.881570] env[62627]: DEBUG nova.compute.manager [-] [instance: c9088ac9-456b-4c92-8773-79c263e7cc2a] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 912.881664] env[62627]: DEBUG nova.network.neutron [-] [instance: c9088ac9-456b-4c92-8773-79c263e7cc2a] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 912.886965] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2148dffe-e989-467c-b64b-daa3b3738927 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.901924] env[62627]: DEBUG nova.compute.provider_tree [None req-560d8d6b-edca-4ab2-af38-da0c63ca679e tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 912.925735] env[62627]: DEBUG oslo_concurrency.lockutils [req-e29a4550-70aa-41c1-b1ec-fd0002972c3e req-7d5f76e6-66ef-4bcc-a7bc-0254eb255d8b service nova] Releasing lock "refresh_cache-d0786233-087b-4000-8c37-fd5c55cc0ef4" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 912.946664] env[62627]: DEBUG oslo_vmware.api [None req-bb3fe9f1-7a2b-4927-9609-df489b9d7bcf tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194491, 'name': CloneVM_Task} progress is 94%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.155864] env[62627]: INFO nova.compute.manager [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Rebuilding instance [ 913.225269] env[62627]: DEBUG nova.compute.manager [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 913.226100] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad56d25b-c776-4037-b76c-9488c5246e1b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.263492] env[62627]: DEBUG oslo_vmware.api [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194492, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.289439] env[62627]: DEBUG nova.compute.manager [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] [instance: 8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a] Starting instance... 
{{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 913.405766] env[62627]: DEBUG nova.scheduler.client.report [None req-560d8d6b-edca-4ab2-af38-da0c63ca679e tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 913.449880] env[62627]: DEBUG oslo_vmware.api [None req-bb3fe9f1-7a2b-4927-9609-df489b9d7bcf tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194491, 'name': CloneVM_Task} progress is 94%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.767357] env[62627]: DEBUG oslo_vmware.api [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194492, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.619731} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.767724] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] d0786233-087b-4000-8c37-fd5c55cc0ef4/d0786233-087b-4000-8c37-fd5c55cc0ef4.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 913.767880] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 913.768200] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1149a220-6898-41bd-97ea-68727da464aa {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.776934] env[62627]: DEBUG oslo_vmware.api [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 913.776934] env[62627]: value = "task-2194493" [ 913.776934] env[62627]: _type = "Task" [ 913.776934] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.789898] env[62627]: DEBUG oslo_vmware.api [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194493, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.822132] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 913.886201] env[62627]: DEBUG nova.compute.manager [req-b429c1d7-5840-4f04-b0d2-7e7fe31ddf33 req-f2ed2850-0ad8-413c-840b-d8c4cd132013 service nova] [instance: c9088ac9-456b-4c92-8773-79c263e7cc2a] Received event network-vif-deleted-7c970a80-e9f6-4f52-944e-27b8cc62ad4b {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 913.886201] env[62627]: INFO nova.compute.manager [req-b429c1d7-5840-4f04-b0d2-7e7fe31ddf33 req-f2ed2850-0ad8-413c-840b-d8c4cd132013 service nova] [instance: c9088ac9-456b-4c92-8773-79c263e7cc2a] Neutron deleted interface 7c970a80-e9f6-4f52-944e-27b8cc62ad4b; detaching it from the instance and deleting it from the info cache [ 913.886201] env[62627]: DEBUG nova.network.neutron [req-b429c1d7-5840-4f04-b0d2-7e7fe31ddf33 req-f2ed2850-0ad8-413c-840b-d8c4cd132013 service nova] [instance: c9088ac9-456b-4c92-8773-79c263e7cc2a] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 913.913758] env[62627]: DEBUG oslo_concurrency.lockutils [None req-560d8d6b-edca-4ab2-af38-da0c63ca679e tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.206s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 913.920300] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.371s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 913.922362] env[62627]: INFO nova.compute.claims [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 913.949596] env[62627]: DEBUG oslo_vmware.api [None req-bb3fe9f1-7a2b-4927-9609-df489b9d7bcf tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194491, 'name': CloneVM_Task, 'duration_secs': 1.490052} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.949924] env[62627]: INFO nova.virt.vmwareapi.vmops [None req-bb3fe9f1-7a2b-4927-9609-df489b9d7bcf tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] Created linked-clone VM from snapshot [ 913.950765] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-767c0929-9837-4dfc-a811-f4b804b1afb8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.957317] env[62627]: INFO nova.scheduler.client.report [None req-560d8d6b-edca-4ab2-af38-da0c63ca679e tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Deleted allocations for instance a12855e2-1544-4e85-a7ff-c026fec861b6 [ 913.958032] env[62627]: DEBUG nova.network.neutron [-] [instance: c9088ac9-456b-4c92-8773-79c263e7cc2a] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 913.968497] env[62627]: DEBUG nova.virt.vmwareapi.images [None req-bb3fe9f1-7a2b-4927-9609-df489b9d7bcf tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] Uploading image 9a6a372d-1423-4d9e-9642-a4a246be11f7 {{(pid=62627) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 914.011608] env[62627]: DEBUG oslo_vmware.rw_handles [None req-bb3fe9f1-7a2b-4927-9609-df489b9d7bcf tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 914.011608] env[62627]: value = "vm-447684" [ 914.011608] env[62627]: _type = "VirtualMachine" [ 914.011608] env[62627]: }. {{(pid=62627) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 914.011919] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-08399f89-d5a1-4b48-9337-f54da9d1f680 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.024161] env[62627]: DEBUG oslo_vmware.rw_handles [None req-bb3fe9f1-7a2b-4927-9609-df489b9d7bcf tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Lease: (returnval){ [ 914.024161] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d1aedc-4caf-9639-8bd5-9d5ecd146e86" [ 914.024161] env[62627]: _type = "HttpNfcLease" [ 914.024161] env[62627]: } obtained for exporting VM: (result){ [ 914.024161] env[62627]: value = "vm-447684" [ 914.024161] env[62627]: _type = "VirtualMachine" [ 914.024161] env[62627]: }. {{(pid=62627) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 914.024526] env[62627]: DEBUG oslo_vmware.api [None req-bb3fe9f1-7a2b-4927-9609-df489b9d7bcf tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the lease: (returnval){ [ 914.024526] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d1aedc-4caf-9639-8bd5-9d5ecd146e86" [ 914.024526] env[62627]: _type = "HttpNfcLease" [ 914.024526] env[62627]: } to be ready. 
{{(pid=62627) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 914.037557] env[62627]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 914.037557] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d1aedc-4caf-9639-8bd5-9d5ecd146e86" [ 914.037557] env[62627]: _type = "HttpNfcLease" [ 914.037557] env[62627]: } is ready. {{(pid=62627) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 914.037822] env[62627]: DEBUG oslo_vmware.rw_handles [None req-bb3fe9f1-7a2b-4927-9609-df489b9d7bcf tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 914.037822] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d1aedc-4caf-9639-8bd5-9d5ecd146e86" [ 914.037822] env[62627]: _type = "HttpNfcLease" [ 914.037822] env[62627]: }. {{(pid=62627) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 914.039229] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83207056-dbd1-417f-ba58-a025fffe1743 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.048756] env[62627]: DEBUG oslo_vmware.rw_handles [None req-bb3fe9f1-7a2b-4927-9609-df489b9d7bcf tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52306819-2e65-0a65-7c88-011b75f0be1a/disk-0.vmdk from lease info. {{(pid=62627) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 914.049186] env[62627]: DEBUG oslo_vmware.rw_handles [None req-bb3fe9f1-7a2b-4927-9609-df489b9d7bcf tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52306819-2e65-0a65-7c88-011b75f0be1a/disk-0.vmdk for reading. 
{{(pid=62627) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 914.123022] env[62627]: INFO nova.compute.manager [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] [instance: 7804914c-7df8-45c2-a934-67aa73afb2e2] Rebuilding instance [ 914.146045] env[62627]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-998c1207-5bfb-48a8-a4a6-9415d34e9218 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.180691] env[62627]: DEBUG nova.compute.manager [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] [instance: 7804914c-7df8-45c2-a934-67aa73afb2e2] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 914.181801] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0847fcd-442d-4226-b9f5-204c79e7499f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.249284] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 914.250013] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4b1d16c2-f635-4ade-a103-90e5363a32de {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.259426] env[62627]: DEBUG oslo_vmware.api [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for the task: (returnval){ [ 914.259426] env[62627]: value = "task-2194495" [ 914.259426] env[62627]: _type = "Task" [ 914.259426] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.270976] env[62627]: DEBUG oslo_vmware.api [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194495, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.286220] env[62627]: DEBUG oslo_vmware.api [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194493, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.097053} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.286491] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 914.287311] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b942a3fb-e084-43a6-8446-c45589e27370 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.310091] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] Reconfiguring VM instance instance-00000032 to attach disk [datastore2] d0786233-087b-4000-8c37-fd5c55cc0ef4/d0786233-087b-4000-8c37-fd5c55cc0ef4.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 914.310393] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ebda225f-ef78-4d91-989d-c183ba7a6d61 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.332650] env[62627]: DEBUG oslo_vmware.api [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 914.332650] env[62627]: value = "task-2194496" [ 914.332650] env[62627]: _type = "Task" [ 914.332650] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.341663] env[62627]: DEBUG oslo_vmware.api [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194496, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.392377] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8bed178b-63fa-4808-ab11-6d0b6f61b77e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.403543] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d91f7a1-c447-4b23-8fb6-526a7cc482de {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.456712] env[62627]: DEBUG nova.compute.manager [req-b429c1d7-5840-4f04-b0d2-7e7fe31ddf33 req-f2ed2850-0ad8-413c-840b-d8c4cd132013 service nova] [instance: c9088ac9-456b-4c92-8773-79c263e7cc2a] Detach interface failed, port_id=7c970a80-e9f6-4f52-944e-27b8cc62ad4b, reason: Instance c9088ac9-456b-4c92-8773-79c263e7cc2a could not be found. {{(pid=62627) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 914.466810] env[62627]: INFO nova.compute.manager [-] [instance: c9088ac9-456b-4c92-8773-79c263e7cc2a] Took 1.58 seconds to deallocate network for instance. 
[ 914.467342] env[62627]: DEBUG oslo_concurrency.lockutils [None req-560d8d6b-edca-4ab2-af38-da0c63ca679e tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lock "a12855e2-1544-4e85-a7ff-c026fec861b6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.381s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 914.770457] env[62627]: DEBUG oslo_vmware.api [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194495, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.847788] env[62627]: DEBUG oslo_vmware.api [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194496, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.977938] env[62627]: DEBUG oslo_concurrency.lockutils [None req-984eae37-fe79-4d36-9fa0-d8596dd2f782 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 915.201947] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] [instance: 7804914c-7df8-45c2-a934-67aa73afb2e2] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 915.203980] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5a9f5de5-1c75-4a5c-8dfc-6e3990fa7bf3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.211738] env[62627]: DEBUG oslo_vmware.api [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Waiting for the task: (returnval){ [ 915.211738] env[62627]: value = "task-2194497" [ 915.211738] env[62627]: _type = "Task" [ 915.211738] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.229230] env[62627]: DEBUG oslo_vmware.api [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Task: {'id': task-2194497, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.279394] env[62627]: DEBUG oslo_vmware.api [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194495, 'name': PowerOffVM_Task, 'duration_secs': 0.941648} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.279979] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 915.280226] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 915.288882] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b6b1e53-e88a-4f4e-888d-5f56ad2a13e3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.299749] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 915.301316] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-236a08d3-1008-44a9-9d6c-38d2854513e6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.355606] env[62627]: DEBUG oslo_vmware.api [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194496, 'name': ReconfigVM_Task, 'duration_secs': 0.712142} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.355606] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] Reconfigured VM instance instance-00000032 to attach disk [datastore2] d0786233-087b-4000-8c37-fd5c55cc0ef4/d0786233-087b-4000-8c37-fd5c55cc0ef4.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 915.356211] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a14e2a9b-35b7-4217-85d0-5b5bdc11768a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.368758] env[62627]: DEBUG oslo_vmware.api [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 915.368758] env[62627]: value = "task-2194499" [ 915.368758] env[62627]: _type = "Task" [ 915.368758] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.386722] env[62627]: DEBUG oslo_vmware.api [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194499, 'name': Rename_Task} progress is 6%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.412909] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 915.413058] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 915.413205] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Deleting the datastore file [datastore2] 7366e4b0-bdba-435d-844d-8e81c4464b31 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 915.413492] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6f090ae9-0344-4f97-a689-da4772f44e93 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.426190] env[62627]: DEBUG oslo_vmware.api [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for the task: (returnval){ [ 915.426190] env[62627]: value = "task-2194500" [ 915.426190] env[62627]: _type = "Task" [ 915.426190] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.436993] env[62627]: DEBUG oslo_vmware.api [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194500, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.559590] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf9fcc8d-df00-4d4e-9f87-ed28fcf64917 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.569401] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bebe9a8c-4d60-4cbb-888b-f6e433d46f4d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.609283] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4abe3a3c-e804-4f8c-8760-c28e5f93e9e0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.619869] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a623102c-6733-4b4f-a7ea-1e935e907a6c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.640406] env[62627]: DEBUG nova.compute.provider_tree [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 915.726795] env[62627]: DEBUG oslo_vmware.api [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Task: {'id': task-2194497, 'name': PowerOffVM_Task, 'duration_secs': 0.149829} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.727165] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] [instance: 7804914c-7df8-45c2-a934-67aa73afb2e2] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 915.727609] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] [instance: 7804914c-7df8-45c2-a934-67aa73afb2e2] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 915.728290] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1405627b-6285-4e80-8904-2ff29b3ae265 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.738668] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] [instance: 7804914c-7df8-45c2-a934-67aa73afb2e2] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 915.738994] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-17084cf6-0493-47e2-a269-a1f1f083bbbf {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.771708] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] [instance: 7804914c-7df8-45c2-a934-67aa73afb2e2] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 915.771957] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] [instance: 7804914c-7df8-45c2-a934-67aa73afb2e2] Deleting contents of the VM from datastore datastore1 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 915.772182] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Deleting the datastore file [datastore1] 7804914c-7df8-45c2-a934-67aa73afb2e2 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 915.772457] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6d009d69-6958-4e9b-92d2-ede546a6dc9d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.783605] env[62627]: DEBUG oslo_vmware.api [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Waiting for the task: (returnval){ [ 915.783605] env[62627]: value = "task-2194502" [ 915.783605] env[62627]: _type = "Task" [ 915.783605] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.796794] env[62627]: DEBUG oslo_vmware.api [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Task: {'id': task-2194502, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.881865] env[62627]: DEBUG oslo_vmware.api [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194499, 'name': Rename_Task, 'duration_secs': 0.181463} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.882561] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 915.883218] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8ebe44d0-6682-4bf9-9c58-3fb3244eb396 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.900849] env[62627]: DEBUG oslo_vmware.api [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 915.900849] env[62627]: value = "task-2194503" [ 915.900849] env[62627]: _type = "Task" [ 915.900849] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.911559] env[62627]: DEBUG oslo_vmware.api [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194503, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.939330] env[62627]: DEBUG oslo_vmware.api [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194500, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.212155} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.940204] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 915.940556] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 915.940882] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 916.145265] env[62627]: DEBUG nova.scheduler.client.report [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 916.296083] env[62627]: DEBUG oslo_vmware.api [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Task: {'id': task-2194502, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.18155} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.298326] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 916.298326] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] [instance: 7804914c-7df8-45c2-a934-67aa73afb2e2] Deleted contents of the VM from datastore datastore1 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 916.298326] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] [instance: 7804914c-7df8-45c2-a934-67aa73afb2e2] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 916.411303] env[62627]: DEBUG oslo_vmware.api [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194503, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.651777] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.731s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 916.652720] env[62627]: DEBUG nova.compute.manager [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Start building networks asynchronously for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 916.659388] env[62627]: DEBUG oslo_concurrency.lockutils [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.146s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 916.666486] env[62627]: INFO nova.compute.claims [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] [instance: 66882603-6ff6-49c2-9f42-3521ecaf98b2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 916.866906] env[62627]: DEBUG oslo_concurrency.lockutils [None req-57d41992-3480-45de-bd64-78a5467b8811 tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Acquiring lock "77c60059-535a-47ca-a5b0-45cc1df93b45" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 916.868586] env[62627]: DEBUG oslo_concurrency.lockutils [None req-57d41992-3480-45de-bd64-78a5467b8811 tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Lock "77c60059-535a-47ca-a5b0-45cc1df93b45" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 916.870238] env[62627]: DEBUG oslo_concurrency.lockutils [None req-57d41992-3480-45de-bd64-78a5467b8811 tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Acquiring lock "77c60059-535a-47ca-a5b0-45cc1df93b45-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 916.870238] env[62627]: DEBUG oslo_concurrency.lockutils [None req-57d41992-3480-45de-bd64-78a5467b8811 tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Lock "77c60059-535a-47ca-a5b0-45cc1df93b45-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 916.870238] env[62627]: DEBUG oslo_concurrency.lockutils [None req-57d41992-3480-45de-bd64-78a5467b8811 tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Lock "77c60059-535a-47ca-a5b0-45cc1df93b45-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 916.872266] env[62627]: INFO nova.compute.manager [None req-57d41992-3480-45de-bd64-78a5467b8811 tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: 77c60059-535a-47ca-a5b0-45cc1df93b45] Terminating instance [ 916.918060] env[62627]: DEBUG oslo_vmware.api [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 
tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194503, 'name': PowerOnVM_Task, 'duration_secs': 0.614459} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.918524] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 916.918712] env[62627]: INFO nova.compute.manager [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] Took 9.85 seconds to spawn the instance on the hypervisor. [ 916.918898] env[62627]: DEBUG nova.compute.manager [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 916.919834] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-144849d7-7eb6-41fe-b83d-f6393457b02c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.984829] env[62627]: DEBUG nova.virt.hardware [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 916.985076] env[62627]: DEBUG nova.virt.hardware [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 916.985076] env[62627]: DEBUG nova.virt.hardware [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 916.987788] env[62627]: DEBUG nova.virt.hardware [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 916.987788] env[62627]: DEBUG 
nova.virt.hardware [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 916.987788] env[62627]: DEBUG nova.virt.hardware [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 916.987788] env[62627]: DEBUG nova.virt.hardware [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 916.987788] env[62627]: DEBUG nova.virt.hardware [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 916.987788] env[62627]: DEBUG nova.virt.hardware [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 916.988894] env[62627]: DEBUG nova.virt.hardware [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 916.989360] env[62627]: DEBUG nova.virt.hardware [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 916.990344] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbcc6500-b65d-4302-af37-1e40eef50649 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.001314] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a324806-3518-4036-925a-7a57fafdd5e0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.019734] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1f:8d:65', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b2907cce-d529-4809-af05-d29397bed211', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '81a799c8-db8c-4bae-8609-d4bf5b56c600', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 917.028125] env[62627]: DEBUG oslo.service.loopingcall [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 917.028461] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 917.028693] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c8d65211-4105-4d77-af63-f0256900fe07 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.051347] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 917.051347] env[62627]: value = "task-2194504" [ 917.051347] env[62627]: _type = "Task" [ 917.051347] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.060781] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194504, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.173731] env[62627]: DEBUG nova.compute.utils [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 917.178978] env[62627]: DEBUG nova.compute.manager [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Allocating IP information in the background. 
{{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 917.179599] env[62627]: DEBUG nova.network.neutron [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 917.304124] env[62627]: DEBUG nova.policy [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3a895c08a57d4396ac7fa5f4ad8642a7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2d41b3fa69e2473fbea79b0ec52108fa', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 917.339350] env[62627]: DEBUG nova.virt.hardware [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 917.339690] env[62627]: DEBUG nova.virt.hardware [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 917.339951] env[62627]: DEBUG nova.virt.hardware [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 917.340246] env[62627]: DEBUG nova.virt.hardware [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 917.340403] env[62627]: DEBUG nova.virt.hardware [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 917.340637] env[62627]: DEBUG nova.virt.hardware [None 
req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 917.340889] env[62627]: DEBUG nova.virt.hardware [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 917.341230] env[62627]: DEBUG nova.virt.hardware [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 917.341309] env[62627]: DEBUG nova.virt.hardware [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 917.341453] env[62627]: DEBUG nova.virt.hardware [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 917.341645] env[62627]: DEBUG nova.virt.hardware [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 917.342630] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-932873bb-9d4e-4fce-9790-980a78cd01f7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.356957] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f806408-19cf-4182-a947-e5c86ba799eb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.373894] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] [instance: 7804914c-7df8-45c2-a934-67aa73afb2e2] Instance VIF info [] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 917.383065] env[62627]: DEBUG oslo.service.loopingcall [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 917.386739] env[62627]: DEBUG nova.compute.manager [None req-57d41992-3480-45de-bd64-78a5467b8811 tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: 77c60059-535a-47ca-a5b0-45cc1df93b45] Start destroying the instance on the hypervisor. {{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 917.386971] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-57d41992-3480-45de-bd64-78a5467b8811 tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: 77c60059-535a-47ca-a5b0-45cc1df93b45] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 917.387254] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7804914c-7df8-45c2-a934-67aa73afb2e2] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 917.388157] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c04aa58-944d-4efc-97cc-3237bd7e13d8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.391268] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ca541077-6fe4-4119-a2aa-932f21eb7d5d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.417716] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-57d41992-3480-45de-bd64-78a5467b8811 tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: 77c60059-535a-47ca-a5b0-45cc1df93b45] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 917.419257] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d745411e-fc42-48f0-ba60-e82516459e87 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.421284] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 917.421284] env[62627]: value = "task-2194505" [ 917.421284] env[62627]: _type = "Task" [ 917.421284] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.429503] env[62627]: DEBUG oslo_vmware.api [None req-57d41992-3480-45de-bd64-78a5467b8811 tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Waiting for the task: (returnval){ [ 917.429503] env[62627]: value = "task-2194506" [ 917.429503] env[62627]: _type = "Task" [ 917.429503] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.437954] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194505, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.449044] env[62627]: INFO nova.compute.manager [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] Took 38.42 seconds to build instance. 
[ 917.454941] env[62627]: DEBUG oslo_vmware.api [None req-57d41992-3480-45de-bd64-78a5467b8811 tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Task: {'id': task-2194506, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.563229] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194504, 'name': CreateVM_Task, 'duration_secs': 0.400485} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.563478] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 917.564765] env[62627]: DEBUG oslo_concurrency.lockutils [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 917.565041] env[62627]: DEBUG oslo_concurrency.lockutils [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 917.565432] env[62627]: DEBUG oslo_concurrency.lockutils [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 917.565722] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a1aebe4f-2eed-4e84-b8fb-9f4f4d077567 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.572225] env[62627]: DEBUG oslo_vmware.api [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for the task: (returnval){ [ 917.572225] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52cfc06b-dc28-79d8-5a58-1a8779b26ca5" [ 917.572225] env[62627]: _type = "Task" [ 917.572225] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.583122] env[62627]: DEBUG oslo_vmware.api [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52cfc06b-dc28-79d8-5a58-1a8779b26ca5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.679419] env[62627]: DEBUG nova.compute.manager [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 917.850761] env[62627]: DEBUG oslo_concurrency.lockutils [None req-322ff823-8468-4e75-a042-b370d3032725 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Acquiring lock "aed62160-2db5-4008-a491-2b27ae81ef5f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 917.851016] env[62627]: DEBUG oslo_concurrency.lockutils [None req-322ff823-8468-4e75-a042-b370d3032725 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Lock "aed62160-2db5-4008-a491-2b27ae81ef5f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 917.851245] env[62627]: DEBUG oslo_concurrency.lockutils [None req-322ff823-8468-4e75-a042-b370d3032725 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Acquiring lock "aed62160-2db5-4008-a491-2b27ae81ef5f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 917.851438] env[62627]: DEBUG oslo_concurrency.lockutils [None req-322ff823-8468-4e75-a042-b370d3032725 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Lock "aed62160-2db5-4008-a491-2b27ae81ef5f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 917.851619] env[62627]: DEBUG oslo_concurrency.lockutils [None req-322ff823-8468-4e75-a042-b370d3032725 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Lock "aed62160-2db5-4008-a491-2b27ae81ef5f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 917.858612] env[62627]: INFO nova.compute.manager [None req-322ff823-8468-4e75-a042-b370d3032725 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: aed62160-2db5-4008-a491-2b27ae81ef5f] Terminating instance [ 917.908252] env[62627]: DEBUG nova.network.neutron [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Successfully created port: 62b171c6-c991-4816-86e3-ba8a0862f911 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 917.936578] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': 
task-2194505, 'name': CreateVM_Task, 'duration_secs': 0.340278} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.938728] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7804914c-7df8-45c2-a934-67aa73afb2e2] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 917.939184] env[62627]: DEBUG oslo_concurrency.lockutils [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 917.946555] env[62627]: DEBUG oslo_vmware.api [None req-57d41992-3480-45de-bd64-78a5467b8811 tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Task: {'id': task-2194506, 'name': PowerOffVM_Task, 'duration_secs': 0.252053} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.946837] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-57d41992-3480-45de-bd64-78a5467b8811 tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: 77c60059-535a-47ca-a5b0-45cc1df93b45] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 917.947282] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-57d41992-3480-45de-bd64-78a5467b8811 tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: 77c60059-535a-47ca-a5b0-45cc1df93b45] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 917.947282] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-28868687-b482-4933-bea0-ed6bfb04b645 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.963054] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5d6a9703-b342-42a4-869a-af23a1d5a221 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "d0786233-087b-4000-8c37-fd5c55cc0ef4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.597s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 918.021889] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-57d41992-3480-45de-bd64-78a5467b8811 tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: 77c60059-535a-47ca-a5b0-45cc1df93b45] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 918.022611] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-57d41992-3480-45de-bd64-78a5467b8811 tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: 77c60059-535a-47ca-a5b0-45cc1df93b45] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 918.022611] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-57d41992-3480-45de-bd64-78a5467b8811 
tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Deleting the datastore file [datastore2] 77c60059-535a-47ca-a5b0-45cc1df93b45 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 918.028524] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b203ec61-8e14-4592-96aa-dc0c9290a215 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.038339] env[62627]: DEBUG oslo_vmware.api [None req-57d41992-3480-45de-bd64-78a5467b8811 tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Waiting for the task: (returnval){ [ 918.038339] env[62627]: value = "task-2194508" [ 918.038339] env[62627]: _type = "Task" [ 918.038339] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.051108] env[62627]: DEBUG oslo_vmware.api [None req-57d41992-3480-45de-bd64-78a5467b8811 tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Task: {'id': task-2194508, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.084775] env[62627]: DEBUG oslo_vmware.api [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52cfc06b-dc28-79d8-5a58-1a8779b26ca5, 'name': SearchDatastore_Task, 'duration_secs': 0.014726} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.085110] env[62627]: DEBUG oslo_concurrency.lockutils [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 918.085357] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 918.085608] env[62627]: DEBUG oslo_concurrency.lockutils [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 918.085770] env[62627]: DEBUG oslo_concurrency.lockutils [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 918.085921] env[62627]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 918.086300] env[62627]: DEBUG oslo_concurrency.lockutils [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 918.086530] env[62627]: DEBUG oslo_concurrency.lockutils [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 918.086965] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5675e759-55d2-4aa8-8eb9-d9019f0d56ef {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.088937] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fcfd1384-4ce9-4e32-8f51-fcb30633590a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.097183] env[62627]: DEBUG oslo_vmware.api [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Waiting for the task: (returnval){ [ 918.097183] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5237f357-e2ff-e200-e941-cf6a68e617d9" [ 918.097183] env[62627]: _type = "Task" [ 918.097183] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.101649] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 918.101802] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 918.102859] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ddb234c-c08a-403f-baee-ff5b6635e6eb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.108707] env[62627]: DEBUG oslo_vmware.api [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5237f357-e2ff-e200-e941-cf6a68e617d9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.118037] env[62627]: DEBUG oslo_vmware.api [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for the task: (returnval){ [ 918.118037] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]522019cd-244b-146e-501a-19909c0917bc" [ 918.118037] env[62627]: _type = "Task" [ 918.118037] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.126276] env[62627]: DEBUG oslo_vmware.api [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]522019cd-244b-146e-501a-19909c0917bc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.229853] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c662bea-16c5-4623-b74e-5e3694080012 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.242128] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed300095-cee0-4e6a-b40b-afe595f6c3c5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.278655] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21f86b8d-8e79-4969-a308-4f1793d739b8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.287927] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-844792a4-1bcf-4dad-8632-a413008c31a7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.307810] env[62627]: DEBUG nova.compute.provider_tree [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 918.365822] env[62627]: DEBUG nova.compute.manager [None req-322ff823-8468-4e75-a042-b370d3032725 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: aed62160-2db5-4008-a491-2b27ae81ef5f] Start destroying the instance on the hypervisor. 
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 918.366086] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-322ff823-8468-4e75-a042-b370d3032725 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: aed62160-2db5-4008-a491-2b27ae81ef5f] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 918.367128] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4087b078-a8ef-4b46-a333-a16972b2c0bc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.376121] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-322ff823-8468-4e75-a042-b370d3032725 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: aed62160-2db5-4008-a491-2b27ae81ef5f] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 918.376463] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b89dfa29-e0bc-4e29-804a-74d0d034c283 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.384901] env[62627]: DEBUG oslo_vmware.api [None req-322ff823-8468-4e75-a042-b370d3032725 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Waiting for the task: (returnval){ [ 918.384901] env[62627]: value = "task-2194509" [ 918.384901] env[62627]: _type = "Task" [ 918.384901] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.396988] env[62627]: DEBUG oslo_vmware.api [None req-322ff823-8468-4e75-a042-b370d3032725 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194509, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.551523] env[62627]: DEBUG oslo_vmware.api [None req-57d41992-3480-45de-bd64-78a5467b8811 tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Task: {'id': task-2194508, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.1947} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.551796] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-57d41992-3480-45de-bd64-78a5467b8811 tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 918.552042] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-57d41992-3480-45de-bd64-78a5467b8811 tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: 77c60059-535a-47ca-a5b0-45cc1df93b45] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 918.552265] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-57d41992-3480-45de-bd64-78a5467b8811 tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: 77c60059-535a-47ca-a5b0-45cc1df93b45] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 918.552500] env[62627]: INFO nova.compute.manager [None req-57d41992-3480-45de-bd64-78a5467b8811 tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] [instance: 77c60059-535a-47ca-a5b0-45cc1df93b45] Took 1.17 seconds to destroy the instance on the hypervisor. [ 918.552764] env[62627]: DEBUG oslo.service.loopingcall [None req-57d41992-3480-45de-bd64-78a5467b8811 tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 918.553053] env[62627]: DEBUG nova.compute.manager [-] [instance: 77c60059-535a-47ca-a5b0-45cc1df93b45] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 918.553136] env[62627]: DEBUG nova.network.neutron [-] [instance: 77c60059-535a-47ca-a5b0-45cc1df93b45] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 918.610845] env[62627]: DEBUG oslo_vmware.api [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5237f357-e2ff-e200-e941-cf6a68e617d9, 'name': SearchDatastore_Task, 'duration_secs': 0.022817} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.611245] env[62627]: DEBUG oslo_concurrency.lockutils [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 918.611491] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] [instance: 7804914c-7df8-45c2-a934-67aa73afb2e2] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 918.611709] env[62627]: DEBUG oslo_concurrency.lockutils [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 918.628536] env[62627]: DEBUG oslo_vmware.api [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]522019cd-244b-146e-501a-19909c0917bc, 'name': SearchDatastore_Task, 'duration_secs': 0.012464} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.629368] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b1f8ec9f-0fe9-43c0-a2bc-ea3e65448308 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.636507] env[62627]: DEBUG oslo_vmware.api [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for the task: (returnval){ [ 918.636507] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5261cfe1-6abd-3c52-2851-f37209caffd9" [ 918.636507] env[62627]: _type = "Task" [ 918.636507] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.649064] env[62627]: DEBUG oslo_vmware.api [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5261cfe1-6abd-3c52-2851-f37209caffd9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.697527] env[62627]: DEBUG nova.compute.manager [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Start spawning the instance on the hypervisor. 
{{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 918.734165] env[62627]: DEBUG nova.virt.hardware [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 918.734676] env[62627]: DEBUG nova.virt.hardware [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 918.734957] env[62627]: DEBUG nova.virt.hardware [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 918.735448] env[62627]: DEBUG nova.virt.hardware [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 918.736031] env[62627]: DEBUG nova.virt.hardware [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 918.736031] env[62627]: DEBUG nova.virt.hardware [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 918.736215] env[62627]: DEBUG nova.virt.hardware [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 918.736279] env[62627]: DEBUG nova.virt.hardware [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 918.736397] env[62627]: DEBUG nova.virt.hardware [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 918.736564] env[62627]: DEBUG nova.virt.hardware [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 918.736757] env[62627]: DEBUG nova.virt.hardware [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 918.737738] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b3920cf-0939-47f7-bdd6-72b00239454b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.747844] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0710d8fc-4620-41c9-bd0c-de3f961ca9e5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.814044] env[62627]: DEBUG nova.scheduler.client.report [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 918.898618] env[62627]: DEBUG oslo_vmware.api [None req-322ff823-8468-4e75-a042-b370d3032725 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194509, 'name': PowerOffVM_Task, 'duration_secs': 0.330009} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.898917] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-322ff823-8468-4e75-a042-b370d3032725 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: aed62160-2db5-4008-a491-2b27ae81ef5f] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 918.899096] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-322ff823-8468-4e75-a042-b370d3032725 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: aed62160-2db5-4008-a491-2b27ae81ef5f] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 918.899629] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cac36f73-a11d-4544-b88c-8faf77f7d363 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.979882] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-322ff823-8468-4e75-a042-b370d3032725 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: aed62160-2db5-4008-a491-2b27ae81ef5f] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 918.980161] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-322ff823-8468-4e75-a042-b370d3032725 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: aed62160-2db5-4008-a491-2b27ae81ef5f] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 918.980352] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-322ff823-8468-4e75-a042-b370d3032725 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Deleting the datastore file [datastore2] aed62160-2db5-4008-a491-2b27ae81ef5f {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 918.980634] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e881855e-6262-409f-b21e-b1350a0bb73e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.990740] env[62627]: DEBUG oslo_vmware.api [None req-322ff823-8468-4e75-a042-b370d3032725 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Waiting for the task: (returnval){ [ 918.990740] env[62627]: value = "task-2194511" [ 918.990740] env[62627]: _type = "Task" [ 918.990740] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.999140] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquiring lock "84b8381c-bdd4-462e-aa98-0cc972f00882" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 918.999442] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lock "84b8381c-bdd4-462e-aa98-0cc972f00882" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 919.005873] env[62627]: DEBUG oslo_vmware.api [None req-322ff823-8468-4e75-a042-b370d3032725 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194511, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.150051] env[62627]: DEBUG oslo_vmware.api [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5261cfe1-6abd-3c52-2851-f37209caffd9, 'name': SearchDatastore_Task, 'duration_secs': 0.025547} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.150051] env[62627]: DEBUG oslo_concurrency.lockutils [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 919.150051] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 7366e4b0-bdba-435d-844d-8e81c4464b31/7366e4b0-bdba-435d-844d-8e81c4464b31.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 919.150051] env[62627]: DEBUG oslo_concurrency.lockutils [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 919.150446] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 919.150446] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5c3fbacf-8fce-412f-a2c9-06ee612c05a2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.152662] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0aae9b16-6a2e-4899-965c-80dad9e82332 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.160711] env[62627]: DEBUG oslo_vmware.api [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for the task: (returnval){ [ 919.160711] env[62627]: value = "task-2194512" [ 919.160711] env[62627]: _type = "Task" [ 919.160711] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.165983] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 919.166190] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 919.167280] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3f9022cf-73db-4962-b4a1-5deadc75e722 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.173180] env[62627]: DEBUG oslo_vmware.api [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194512, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.177063] env[62627]: DEBUG oslo_vmware.api [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Waiting for the task: (returnval){ [ 919.177063] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52c97355-0008-ee77-5c1b-a1d40ad0073d" [ 919.177063] env[62627]: _type = "Task" [ 919.177063] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.185617] env[62627]: DEBUG oslo_vmware.api [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52c97355-0008-ee77-5c1b-a1d40ad0073d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.305504] env[62627]: DEBUG nova.compute.manager [req-7e3a7ec7-2b29-410b-b37e-87823d6571ab req-f19d106b-aa5d-4ace-b959-822fd1fe0018 service nova] [instance: 77c60059-535a-47ca-a5b0-45cc1df93b45] Received event network-vif-deleted-af7ffe26-3161-4ff9-abf9-eb8480443269 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 919.305959] env[62627]: INFO nova.compute.manager [req-7e3a7ec7-2b29-410b-b37e-87823d6571ab req-f19d106b-aa5d-4ace-b959-822fd1fe0018 service nova] [instance: 77c60059-535a-47ca-a5b0-45cc1df93b45] Neutron deleted interface af7ffe26-3161-4ff9-abf9-eb8480443269; detaching it from the instance and deleting it from the info cache [ 919.305959] env[62627]: DEBUG nova.network.neutron [req-7e3a7ec7-2b29-410b-b37e-87823d6571ab req-f19d106b-aa5d-4ace-b959-822fd1fe0018 service nova] [instance: 77c60059-535a-47ca-a5b0-45cc1df93b45] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 919.325198] env[62627]: DEBUG oslo_concurrency.lockutils [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.665s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 919.329082] env[62627]: DEBUG nova.compute.manager [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] [instance: 66882603-6ff6-49c2-9f42-3521ecaf98b2] Start building networks asynchronously for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 919.331437] env[62627]: DEBUG oslo_concurrency.lockutils [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.139s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 919.334032] env[62627]: INFO nova.compute.claims [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 919.418926] env[62627]: DEBUG nova.network.neutron [-] [instance: 77c60059-535a-47ca-a5b0-45cc1df93b45] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 919.503701] env[62627]: DEBUG nova.compute.manager [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 919.506908] env[62627]: DEBUG oslo_vmware.api [None req-322ff823-8468-4e75-a042-b370d3032725 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194511, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.224134} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.507511] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-322ff823-8468-4e75-a042-b370d3032725 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 919.507736] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-322ff823-8468-4e75-a042-b370d3032725 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: aed62160-2db5-4008-a491-2b27ae81ef5f] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 919.507963] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-322ff823-8468-4e75-a042-b370d3032725 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: aed62160-2db5-4008-a491-2b27ae81ef5f] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 919.510065] env[62627]: INFO nova.compute.manager [None req-322ff823-8468-4e75-a042-b370d3032725 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: aed62160-2db5-4008-a491-2b27ae81ef5f] Took 1.14 seconds to destroy the instance on the hypervisor. [ 919.510065] env[62627]: DEBUG oslo.service.loopingcall [None req-322ff823-8468-4e75-a042-b370d3032725 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 919.510065] env[62627]: DEBUG nova.compute.manager [-] [instance: aed62160-2db5-4008-a491-2b27ae81ef5f] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 919.510065] env[62627]: DEBUG nova.network.neutron [-] [instance: aed62160-2db5-4008-a491-2b27ae81ef5f] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 919.531702] env[62627]: DEBUG nova.compute.manager [req-76365b32-9898-4b68-9203-378d43ef67a0 req-8b9f29f2-d5b6-4e15-9747-c4ca2c141f54 service nova] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Received event network-vif-plugged-62b171c6-c991-4816-86e3-ba8a0862f911 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 919.531940] env[62627]: DEBUG oslo_concurrency.lockutils [req-76365b32-9898-4b68-9203-378d43ef67a0 req-8b9f29f2-d5b6-4e15-9747-c4ca2c141f54 service nova] Acquiring lock "457e5aa6-d229-4f48-8817-e918ec4bd0f9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 919.532247] env[62627]: DEBUG oslo_concurrency.lockutils [req-76365b32-9898-4b68-9203-378d43ef67a0 req-8b9f29f2-d5b6-4e15-9747-c4ca2c141f54 service nova] Lock "457e5aa6-d229-4f48-8817-e918ec4bd0f9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 919.532625] env[62627]: DEBUG oslo_concurrency.lockutils [req-76365b32-9898-4b68-9203-378d43ef67a0 req-8b9f29f2-d5b6-4e15-9747-c4ca2c141f54 service nova] Lock "457e5aa6-d229-4f48-8817-e918ec4bd0f9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 919.532826] env[62627]: DEBUG nova.compute.manager [req-76365b32-9898-4b68-9203-378d43ef67a0 req-8b9f29f2-d5b6-4e15-9747-c4ca2c141f54 service nova] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] No waiting events found dispatching network-vif-plugged-62b171c6-c991-4816-86e3-ba8a0862f911 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 919.532995] env[62627]: WARNING nova.compute.manager [req-76365b32-9898-4b68-9203-378d43ef67a0 req-8b9f29f2-d5b6-4e15-9747-c4ca2c141f54 service nova] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Received unexpected event network-vif-plugged-62b171c6-c991-4816-86e3-ba8a0862f911 for instance with vm_state building and task_state spawning. [ 919.643605] env[62627]: DEBUG nova.network.neutron [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Successfully updated port: 62b171c6-c991-4816-86e3-ba8a0862f911 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 919.676291] env[62627]: DEBUG oslo_vmware.api [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194512, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.690439] env[62627]: DEBUG oslo_vmware.api [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52c97355-0008-ee77-5c1b-a1d40ad0073d, 'name': SearchDatastore_Task, 'duration_secs': 0.022277} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.691337] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3a2c045e-21c2-4a23-8602-8449206c398a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.698177] env[62627]: DEBUG oslo_vmware.api [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Waiting for the task: (returnval){ [ 919.698177] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5240a177-b14c-4aa7-63d7-3b776ad192e4" [ 919.698177] env[62627]: _type = "Task" [ 919.698177] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.708284] env[62627]: DEBUG oslo_vmware.api [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5240a177-b14c-4aa7-63d7-3b776ad192e4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.809833] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-21412e2a-5fe8-485a-96d1-6ecd97e1eb35 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.820064] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c1630b5-45a9-40c4-9b83-85b6e09a6e8b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.832769] env[62627]: DEBUG nova.compute.utils [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 919.834346] env[62627]: DEBUG nova.compute.manager [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] [instance: 66882603-6ff6-49c2-9f42-3521ecaf98b2] Allocating IP information in the background. 
{{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 919.834587] env[62627]: DEBUG nova.network.neutron [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] [instance: 66882603-6ff6-49c2-9f42-3521ecaf98b2] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 919.862897] env[62627]: DEBUG nova.compute.manager [req-7e3a7ec7-2b29-410b-b37e-87823d6571ab req-f19d106b-aa5d-4ace-b959-822fd1fe0018 service nova] [instance: 77c60059-535a-47ca-a5b0-45cc1df93b45] Detach interface failed, port_id=af7ffe26-3161-4ff9-abf9-eb8480443269, reason: Instance 77c60059-535a-47ca-a5b0-45cc1df93b45 could not be found. {{(pid=62627) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 919.889659] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3ef77a08-4245-44fc-9fd4-427e567fe463 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquiring lock "d0786233-087b-4000-8c37-fd5c55cc0ef4" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 919.889954] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3ef77a08-4245-44fc-9fd4-427e567fe463 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "d0786233-087b-4000-8c37-fd5c55cc0ef4" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 919.907943] env[62627]: DEBUG nova.policy [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7d350ccd054f442595877e0d2f2f773b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'af9e3552cc0543bf8dc5562e645493fc', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 919.921926] env[62627]: INFO nova.compute.manager [-] [instance: 77c60059-535a-47ca-a5b0-45cc1df93b45] Took 1.37 seconds to deallocate network for instance. 
[ 920.032147] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 920.147070] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Acquiring lock "refresh_cache-457e5aa6-d229-4f48-8817-e918ec4bd0f9" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 920.147220] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Acquired lock "refresh_cache-457e5aa6-d229-4f48-8817-e918ec4bd0f9" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 920.147476] env[62627]: DEBUG nova.network.neutron [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 920.175307] env[62627]: DEBUG oslo_vmware.api [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194512, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.557184} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.175683] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 7366e4b0-bdba-435d-844d-8e81c4464b31/7366e4b0-bdba-435d-844d-8e81c4464b31.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 920.175946] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 920.176192] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fc582fc3-4bb1-44c9-b24e-c190f967a9d5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.185757] env[62627]: DEBUG oslo_vmware.api [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for the task: (returnval){ [ 920.185757] env[62627]: value = "task-2194513" [ 920.185757] env[62627]: _type = "Task" [ 920.185757] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.197251] env[62627]: DEBUG oslo_vmware.api [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194513, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.209157] env[62627]: DEBUG oslo_vmware.api [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5240a177-b14c-4aa7-63d7-3b776ad192e4, 'name': SearchDatastore_Task, 'duration_secs': 0.012358} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.209916] env[62627]: DEBUG oslo_concurrency.lockutils [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 920.210372] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 7804914c-7df8-45c2-a934-67aa73afb2e2/7804914c-7df8-45c2-a934-67aa73afb2e2.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 920.210669] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-18ea953b-0bd3-481c-bb73-b5bd3c2eecc8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.219896] env[62627]: DEBUG oslo_vmware.api [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Waiting for the task: (returnval){ [ 920.219896] env[62627]: value = "task-2194514" [ 920.219896] env[62627]: _type = "Task" [ 920.219896] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.229191] env[62627]: DEBUG oslo_vmware.api [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Task: {'id': task-2194514, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.295384] env[62627]: DEBUG nova.network.neutron [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] [instance: 66882603-6ff6-49c2-9f42-3521ecaf98b2] Successfully created port: 3dc09d47-bf2c-4769-96df-8bc30f2bb658 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 920.341586] env[62627]: DEBUG nova.compute.manager [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] [instance: 66882603-6ff6-49c2-9f42-3521ecaf98b2] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 920.394665] env[62627]: DEBUG nova.compute.utils [None req-3ef77a08-4245-44fc-9fd4-427e567fe463 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 920.429214] env[62627]: DEBUG oslo_concurrency.lockutils [None req-57d41992-3480-45de-bd64-78a5467b8811 tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 920.498836] env[62627]: DEBUG nova.network.neutron [-] [instance: aed62160-2db5-4008-a491-2b27ae81ef5f] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 920.701775] env[62627]: DEBUG oslo_vmware.api [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194513, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.144885} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.703067] env[62627]: DEBUG nova.network.neutron [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Instance cache missing network info. 
{{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 920.705685] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 920.709917] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0451993d-fd65-4907-a14e-fd13f50d5843 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.737631] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Reconfiguring VM instance instance-00000007 to attach disk [datastore2] 7366e4b0-bdba-435d-844d-8e81c4464b31/7366e4b0-bdba-435d-844d-8e81c4464b31.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 920.744516] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cd0f15ec-7557-4fed-99d1-38159a6944ab {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.771282] env[62627]: DEBUG oslo_vmware.api [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Task: {'id': task-2194514, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.774858] env[62627]: DEBUG oslo_vmware.api [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for the task: (returnval){ [ 920.774858] env[62627]: value = "task-2194515" [ 920.774858] env[62627]: _type = "Task" [ 920.774858] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.782981] env[62627]: DEBUG oslo_vmware.api [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194515, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.897636] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3ef77a08-4245-44fc-9fd4-427e567fe463 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "d0786233-087b-4000-8c37-fd5c55cc0ef4" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 920.976385] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00dd5dad-b92f-4e34-a71a-a876c28ba536 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.987260] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-990b874c-f81e-4507-9eb4-c96b04c5dd6d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.022968] env[62627]: INFO nova.compute.manager [-] [instance: aed62160-2db5-4008-a491-2b27ae81ef5f] Took 1.51 seconds to deallocate network for instance. [ 921.025938] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-212d0e9a-dbd3-485c-9666-e3f21592aa1c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.037342] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ea0404c-f141-4ff1-8b74-5ce68add840b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.052369] env[62627]: DEBUG nova.compute.provider_tree [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 921.082547] env[62627]: DEBUG nova.network.neutron [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Updating instance_info_cache with network_info: [{"id": "62b171c6-c991-4816-86e3-ba8a0862f911", "address": "fa:16:3e:d7:1a:59", "network": {"id": "0b8ce5a6-9abc-4473-b558-a4e9e5c7d741", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1874234589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d41b3fa69e2473fbea79b0ec52108fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc9714ff-7109-4ea1-9435-b2b3fbdb9e81", "external-id": "nsx-vlan-transportzone-887", "segmentation_id": 887, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62b171c6-c9", "ovs_interfaceid": "62b171c6-c991-4816-86e3-ba8a0862f911", "qbh_params": null, "qbg_params": 
null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 921.239789] env[62627]: DEBUG oslo_vmware.api [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Task: {'id': task-2194514, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.806715} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.240092] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 7804914c-7df8-45c2-a934-67aa73afb2e2/7804914c-7df8-45c2-a934-67aa73afb2e2.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 921.240309] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] [instance: 7804914c-7df8-45c2-a934-67aa73afb2e2] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 921.240584] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-13366de7-f7d7-47ba-93e4-a271c5d36b8d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.250031] env[62627]: DEBUG oslo_vmware.api [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Waiting for the task: (returnval){ [ 921.250031] env[62627]: value = "task-2194516" [ 921.250031] env[62627]: _type = "Task" [ 921.250031] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.259837] env[62627]: DEBUG oslo_vmware.api [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Task: {'id': task-2194516, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.284590] env[62627]: DEBUG oslo_vmware.api [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194515, 'name': ReconfigVM_Task, 'duration_secs': 0.453914} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.284918] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Reconfigured VM instance instance-00000007 to attach disk [datastore2] 7366e4b0-bdba-435d-844d-8e81c4464b31/7366e4b0-bdba-435d-844d-8e81c4464b31.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 921.285658] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e839a75c-249b-4845-8218-12354c28eb8e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.298696] env[62627]: DEBUG oslo_vmware.api [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for the task: (returnval){ [ 921.298696] env[62627]: value = "task-2194517" [ 921.298696] env[62627]: _type = "Task" [ 921.298696] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.306796] env[62627]: DEBUG oslo_vmware.api [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194517, 'name': Rename_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.365024] env[62627]: DEBUG nova.compute.manager [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] [instance: 66882603-6ff6-49c2-9f42-3521ecaf98b2] Start spawning the instance on the hypervisor. 
{{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 921.387330] env[62627]: DEBUG nova.virt.hardware [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 921.387615] env[62627]: DEBUG nova.virt.hardware [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 921.387784] env[62627]: DEBUG nova.virt.hardware [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 921.387977] env[62627]: DEBUG nova.virt.hardware [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 921.388150] env[62627]: DEBUG nova.virt.hardware [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 921.388308] env[62627]: DEBUG nova.virt.hardware [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 921.388528] env[62627]: DEBUG nova.virt.hardware [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 921.388693] env[62627]: DEBUG nova.virt.hardware [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 921.388870] env[62627]: DEBUG nova.virt.hardware [None 
req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 921.389050] env[62627]: DEBUG nova.virt.hardware [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 921.389238] env[62627]: DEBUG nova.virt.hardware [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 921.390143] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae817b0b-258c-4434-a9b4-9dc0285e1bb1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.399258] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c7ec9d2-ce41-458b-b778-a1c92426ce3d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.533923] env[62627]: DEBUG oslo_concurrency.lockutils [None req-322ff823-8468-4e75-a042-b370d3032725 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 921.557987] env[62627]: DEBUG nova.scheduler.client.report [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 921.585777] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Releasing lock "refresh_cache-457e5aa6-d229-4f48-8817-e918ec4bd0f9" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 921.586296] env[62627]: DEBUG nova.compute.manager [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Instance network_info: |[{"id": "62b171c6-c991-4816-86e3-ba8a0862f911", "address": "fa:16:3e:d7:1a:59", "network": {"id": "0b8ce5a6-9abc-4473-b558-a4e9e5c7d741", "bridge": "br-int", "label": 
"tempest-ServerRescueNegativeTestJSON-1874234589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d41b3fa69e2473fbea79b0ec52108fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc9714ff-7109-4ea1-9435-b2b3fbdb9e81", "external-id": "nsx-vlan-transportzone-887", "segmentation_id": 887, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62b171c6-c9", "ovs_interfaceid": "62b171c6-c991-4816-86e3-ba8a0862f911", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 921.586909] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d7:1a:59', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bc9714ff-7109-4ea1-9435-b2b3fbdb9e81', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '62b171c6-c991-4816-86e3-ba8a0862f911', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 921.603512] env[62627]: DEBUG oslo.service.loopingcall [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 921.605750] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 921.606053] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4f54475e-0733-4488-9747-107d1830e9d0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.628688] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 921.628688] env[62627]: value = "task-2194518" [ 921.628688] env[62627]: _type = "Task" [ 921.628688] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.637129] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194518, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.764998] env[62627]: DEBUG oslo_vmware.api [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Task: {'id': task-2194516, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082839} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.765317] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] [instance: 7804914c-7df8-45c2-a934-67aa73afb2e2] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 921.766328] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20f4c9cd-31c4-4a3d-ad75-758a3037fa29 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.788068] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] [instance: 7804914c-7df8-45c2-a934-67aa73afb2e2] Reconfiguring VM instance instance-00000031 to attach disk [datastore2] 7804914c-7df8-45c2-a934-67aa73afb2e2/7804914c-7df8-45c2-a934-67aa73afb2e2.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 921.788068] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f560a025-d136-4885-ac49-a3baf690c885 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.815391] env[62627]: DEBUG oslo_vmware.api [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194517, 'name': Rename_Task, 'duration_secs': 0.151115} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.815391] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 921.815686] env[62627]: DEBUG oslo_vmware.api [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Waiting for the task: (returnval){ [ 921.815686] env[62627]: value = "task-2194519" [ 921.815686] env[62627]: _type = "Task" [ 921.815686] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.815818] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f0c4eba6-32be-45cc-886d-ca1116792c95 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.831312] env[62627]: DEBUG oslo_vmware.api [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Task: {'id': task-2194519, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.832891] env[62627]: DEBUG oslo_vmware.api [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for the task: (returnval){ [ 921.832891] env[62627]: value = "task-2194520" [ 921.832891] env[62627]: _type = "Task" [ 921.832891] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.845467] env[62627]: DEBUG oslo_vmware.api [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194520, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.949559] env[62627]: DEBUG nova.compute.manager [req-6327bfc9-81aa-472d-b361-ebc553c2bfda req-f612a284-b35c-48fe-a8e6-27ce076bd6d7 service nova] [instance: aed62160-2db5-4008-a491-2b27ae81ef5f] Received event network-vif-deleted-7b046d10-1606-48ca-9f84-f34e991b5dd5 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 921.976348] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3ef77a08-4245-44fc-9fd4-427e567fe463 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquiring lock "d0786233-087b-4000-8c37-fd5c55cc0ef4" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 921.976619] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3ef77a08-4245-44fc-9fd4-427e567fe463 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "d0786233-087b-4000-8c37-fd5c55cc0ef4" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 921.976952] env[62627]: INFO nova.compute.manager [None req-3ef77a08-4245-44fc-9fd4-427e567fe463 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] Attaching volume d70a21ef-38af-445d-bae6-087172ff59dc to /dev/sdb [ 922.021254] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9d9221b-4acf-46aa-8f88-8a7090b52fa0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.030547] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5887dea5-e6c0-4aca-bd9f-be7fd7a7f705 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.047629] env[62627]: DEBUG nova.virt.block_device [None req-3ef77a08-4245-44fc-9fd4-427e567fe463 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] Updating existing volume attachment record: 66143736-77e8-47d6-80ce-ac4812a4c01e {{(pid=62627) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 922.065612] env[62627]: DEBUG oslo_concurrency.lockutils [None req-da634f2f-99d7-4268-b120-9667049234eb 
tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.734s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 922.066171] env[62627]: DEBUG nova.compute.manager [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 922.068988] env[62627]: DEBUG oslo_concurrency.lockutils [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.790s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 922.070379] env[62627]: INFO nova.compute.claims [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] [instance: 4e65e39e-3b0b-4c71-87d7-e1ed663ada4a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 922.116773] env[62627]: DEBUG nova.compute.manager [req-15624e55-8d59-4164-91ce-09a10b8c0bb0 req-3c558849-c5ac-47f7-95bb-36a9707264d7 service nova] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Received event network-changed-62b171c6-c991-4816-86e3-ba8a0862f911 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 922.117010] env[62627]: DEBUG nova.compute.manager [req-15624e55-8d59-4164-91ce-09a10b8c0bb0 req-3c558849-c5ac-47f7-95bb-36a9707264d7 service nova] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Refreshing instance network info cache due to event network-changed-62b171c6-c991-4816-86e3-ba8a0862f911. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 922.117248] env[62627]: DEBUG oslo_concurrency.lockutils [req-15624e55-8d59-4164-91ce-09a10b8c0bb0 req-3c558849-c5ac-47f7-95bb-36a9707264d7 service nova] Acquiring lock "refresh_cache-457e5aa6-d229-4f48-8817-e918ec4bd0f9" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 922.117388] env[62627]: DEBUG oslo_concurrency.lockutils [req-15624e55-8d59-4164-91ce-09a10b8c0bb0 req-3c558849-c5ac-47f7-95bb-36a9707264d7 service nova] Acquired lock "refresh_cache-457e5aa6-d229-4f48-8817-e918ec4bd0f9" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 922.117549] env[62627]: DEBUG nova.network.neutron [req-15624e55-8d59-4164-91ce-09a10b8c0bb0 req-3c558849-c5ac-47f7-95bb-36a9707264d7 service nova] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Refreshing network info cache for port 62b171c6-c991-4816-86e3-ba8a0862f911 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 922.126183] env[62627]: DEBUG oslo_vmware.rw_handles [None req-bb3fe9f1-7a2b-4927-9609-df489b9d7bcf tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52306819-2e65-0a65-7c88-011b75f0be1a/disk-0.vmdk. 
{{(pid=62627) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 922.128796] env[62627]: DEBUG nova.network.neutron [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] [instance: 66882603-6ff6-49c2-9f42-3521ecaf98b2] Successfully updated port: 3dc09d47-bf2c-4769-96df-8bc30f2bb658 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 922.128796] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8703512-c4de-4a73-8bcf-0ea59cc0a83c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.145235] env[62627]: DEBUG oslo_vmware.rw_handles [None req-bb3fe9f1-7a2b-4927-9609-df489b9d7bcf tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52306819-2e65-0a65-7c88-011b75f0be1a/disk-0.vmdk is in state: ready. {{(pid=62627) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 922.145418] env[62627]: ERROR oslo_vmware.rw_handles [None req-bb3fe9f1-7a2b-4927-9609-df489b9d7bcf tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52306819-2e65-0a65-7c88-011b75f0be1a/disk-0.vmdk due to incomplete transfer. [ 922.145627] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194518, 'name': CreateVM_Task, 'duration_secs': 0.403473} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.146342] env[62627]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-0f7c20ac-0430-40e8-98c3-88578ef51412 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.147893] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 922.148777] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 922.148987] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 922.149549] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 922.149876] env[62627]: DEBUG 
oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-785ca8b9-af04-4514-99ee-80f9c0430a8e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.155810] env[62627]: DEBUG oslo_vmware.api [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for the task: (returnval){ [ 922.155810] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52a77823-884f-e5f5-0ab9-9f5a754c1f09" [ 922.155810] env[62627]: _type = "Task" [ 922.155810] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.157506] env[62627]: DEBUG oslo_vmware.rw_handles [None req-bb3fe9f1-7a2b-4927-9609-df489b9d7bcf tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52306819-2e65-0a65-7c88-011b75f0be1a/disk-0.vmdk. {{(pid=62627) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 922.157703] env[62627]: DEBUG nova.virt.vmwareapi.images [None req-bb3fe9f1-7a2b-4927-9609-df489b9d7bcf tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] Uploaded image 9a6a372d-1423-4d9e-9642-a4a246be11f7 to the Glance image server {{(pid=62627) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 922.160134] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb3fe9f1-7a2b-4927-9609-df489b9d7bcf tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] Destroying the VM {{(pid=62627) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 922.164063] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-63ae14f2-8562-422d-b9ac-7acf1a8fc022 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.173340] env[62627]: DEBUG oslo_vmware.api [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52a77823-884f-e5f5-0ab9-9f5a754c1f09, 'name': SearchDatastore_Task, 'duration_secs': 0.010633} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.174715] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 922.174949] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 922.175203] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 922.175343] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 922.175533] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 922.175846] env[62627]: DEBUG oslo_vmware.api [None req-bb3fe9f1-7a2b-4927-9609-df489b9d7bcf tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the task: (returnval){ [ 922.175846] env[62627]: value = "task-2194521" [ 922.175846] env[62627]: _type = "Task" [ 922.175846] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.176500] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-51830a92-4555-4428-b948-4fd6bdf373c2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.187017] env[62627]: DEBUG oslo_vmware.api [None req-bb3fe9f1-7a2b-4927-9609-df489b9d7bcf tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194521, 'name': Destroy_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.188168] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 922.188355] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 922.189373] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-092901b1-e86c-4e4b-bc71-03083493f237 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.194990] env[62627]: DEBUG oslo_vmware.api [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for the task: (returnval){ [ 922.194990] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]522975a2-6312-65f2-77b8-526b589854c6" [ 922.194990] env[62627]: _type = "Task" [ 922.194990] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.208498] env[62627]: DEBUG oslo_vmware.api [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]522975a2-6312-65f2-77b8-526b589854c6, 'name': SearchDatastore_Task} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.209231] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7435b0a8-eb21-46f5-8c63-bed64fc0c7ef {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.214808] env[62627]: DEBUG oslo_vmware.api [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for the task: (returnval){ [ 922.214808] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52607580-5c51-c7ec-b840-b928900b4a55" [ 922.214808] env[62627]: _type = "Task" [ 922.214808] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.223336] env[62627]: DEBUG oslo_vmware.api [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52607580-5c51-c7ec-b840-b928900b4a55, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.329693] env[62627]: DEBUG oslo_vmware.api [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Task: {'id': task-2194519, 'name': ReconfigVM_Task, 'duration_secs': 0.398086} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.330061] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] [instance: 7804914c-7df8-45c2-a934-67aa73afb2e2] Reconfigured VM instance instance-00000031 to attach disk [datastore2] 7804914c-7df8-45c2-a934-67aa73afb2e2/7804914c-7df8-45c2-a934-67aa73afb2e2.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 922.330756] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c7579812-ad29-4477-adf3-0b15385bbf06 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.339961] env[62627]: DEBUG oslo_vmware.api [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Waiting for the task: (returnval){ [ 922.339961] env[62627]: value = "task-2194523" [ 922.339961] env[62627]: _type = "Task" [ 922.339961] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.347474] env[62627]: DEBUG oslo_vmware.api [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194520, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.353668] env[62627]: DEBUG oslo_vmware.api [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Task: {'id': task-2194523, 'name': Rename_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.575946] env[62627]: DEBUG nova.compute.utils [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 922.582080] env[62627]: DEBUG nova.compute.manager [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Allocating IP information in the background. 
{{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 922.584116] env[62627]: DEBUG nova.network.neutron [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 922.637826] env[62627]: DEBUG oslo_concurrency.lockutils [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Acquiring lock "refresh_cache-66882603-6ff6-49c2-9f42-3521ecaf98b2" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 922.637826] env[62627]: DEBUG oslo_concurrency.lockutils [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Acquired lock "refresh_cache-66882603-6ff6-49c2-9f42-3521ecaf98b2" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 922.637826] env[62627]: DEBUG nova.network.neutron [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] [instance: 66882603-6ff6-49c2-9f42-3521ecaf98b2] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 922.658575] env[62627]: DEBUG nova.policy [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4d067f5bbaa846048d3e9e3d857310a9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6ad7b328aba44b51aec9455bada6678c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 922.688894] env[62627]: DEBUG oslo_vmware.api [None req-bb3fe9f1-7a2b-4927-9609-df489b9d7bcf tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194521, 'name': Destroy_Task, 'duration_secs': 0.336046} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.689237] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-bb3fe9f1-7a2b-4927-9609-df489b9d7bcf tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] Destroyed the VM [ 922.689444] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-bb3fe9f1-7a2b-4927-9609-df489b9d7bcf tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] Deleting Snapshot of the VM instance {{(pid=62627) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 922.689792] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-379dcc6a-02bc-4e16-8654-df0346344aa8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.697239] env[62627]: DEBUG oslo_vmware.api [None req-bb3fe9f1-7a2b-4927-9609-df489b9d7bcf tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the task: (returnval){ [ 922.697239] env[62627]: value = "task-2194526" [ 922.697239] env[62627]: _type = "Task" [ 922.697239] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.705953] env[62627]: DEBUG oslo_vmware.api [None req-bb3fe9f1-7a2b-4927-9609-df489b9d7bcf tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194526, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.729468] env[62627]: DEBUG oslo_vmware.api [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52607580-5c51-c7ec-b840-b928900b4a55, 'name': SearchDatastore_Task, 'duration_secs': 0.010261} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.729768] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 922.730035] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 457e5aa6-d229-4f48-8817-e918ec4bd0f9/457e5aa6-d229-4f48-8817-e918ec4bd0f9.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 922.730300] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5f933b98-4631-467b-9468-2e09da48935d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.738190] env[62627]: DEBUG oslo_vmware.api [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for the task: (returnval){ [ 922.738190] env[62627]: value = "task-2194527" [ 922.738190] env[62627]: _type = "Task" [ 922.738190] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.747760] env[62627]: DEBUG oslo_vmware.api [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194527, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.852628] env[62627]: DEBUG oslo_vmware.api [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194520, 'name': PowerOnVM_Task, 'duration_secs': 0.550661} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.854095] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 922.854432] env[62627]: DEBUG nova.compute.manager [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 922.855037] env[62627]: DEBUG oslo_vmware.api [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Task: {'id': task-2194523, 'name': Rename_Task, 'duration_secs': 0.315818} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.855912] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da817c3f-e74a-448e-88df-9b39a45314b5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.858997] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] [instance: 7804914c-7df8-45c2-a934-67aa73afb2e2] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 922.860252] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1ea21276-1495-4213-8fe3-3ff3a91c6d0d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.869367] env[62627]: DEBUG oslo_vmware.api [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Waiting for the task: (returnval){ [ 922.869367] env[62627]: value = "task-2194528" [ 922.869367] env[62627]: _type = "Task" [ 922.869367] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.878446] env[62627]: DEBUG oslo_vmware.api [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Task: {'id': task-2194528, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.925463] env[62627]: DEBUG nova.network.neutron [req-15624e55-8d59-4164-91ce-09a10b8c0bb0 req-3c558849-c5ac-47f7-95bb-36a9707264d7 service nova] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Updated VIF entry in instance network info cache for port 62b171c6-c991-4816-86e3-ba8a0862f911. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 922.925463] env[62627]: DEBUG nova.network.neutron [req-15624e55-8d59-4164-91ce-09a10b8c0bb0 req-3c558849-c5ac-47f7-95bb-36a9707264d7 service nova] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Updating instance_info_cache with network_info: [{"id": "62b171c6-c991-4816-86e3-ba8a0862f911", "address": "fa:16:3e:d7:1a:59", "network": {"id": "0b8ce5a6-9abc-4473-b558-a4e9e5c7d741", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1874234589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d41b3fa69e2473fbea79b0ec52108fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc9714ff-7109-4ea1-9435-b2b3fbdb9e81", "external-id": "nsx-vlan-transportzone-887", "segmentation_id": 887, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62b171c6-c9", "ovs_interfaceid": "62b171c6-c991-4816-86e3-ba8a0862f911", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 923.035578] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Acquiring lock "f6357f01-d7cb-4477-be2d-98fc17d7a940" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 923.035844] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Lock "f6357f01-d7cb-4477-be2d-98fc17d7a940" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 923.079984] env[62627]: DEBUG nova.compute.manager [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Start building block device mappings for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 923.161714] env[62627]: DEBUG nova.network.neutron [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Successfully created port: e7de9fde-1309-4a6a-bbb5-537333d9e42c {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 923.205021] env[62627]: DEBUG nova.network.neutron [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] [instance: 66882603-6ff6-49c2-9f42-3521ecaf98b2] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 923.214594] env[62627]: DEBUG oslo_vmware.api [None req-bb3fe9f1-7a2b-4927-9609-df489b9d7bcf tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194526, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.255029] env[62627]: DEBUG oslo_vmware.api [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194527, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.486058} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.255250] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 457e5aa6-d229-4f48-8817-e918ec4bd0f9/457e5aa6-d229-4f48-8817-e918ec4bd0f9.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 923.255491] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 923.255768] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-10b63b52-369d-47f6-9fd7-132319eb9c1d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.263564] env[62627]: DEBUG oslo_vmware.api [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for the task: (returnval){ [ 923.263564] env[62627]: value = "task-2194529" [ 923.263564] env[62627]: _type = "Task" [ 923.263564] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.276032] env[62627]: DEBUG oslo_vmware.api [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194529, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.390394] env[62627]: DEBUG oslo_concurrency.lockutils [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 923.395249] env[62627]: DEBUG oslo_vmware.api [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Task: {'id': task-2194528, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.427757] env[62627]: DEBUG oslo_concurrency.lockutils [req-15624e55-8d59-4164-91ce-09a10b8c0bb0 req-3c558849-c5ac-47f7-95bb-36a9707264d7 service nova] Releasing lock "refresh_cache-457e5aa6-d229-4f48-8817-e918ec4bd0f9" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 923.438282] env[62627]: DEBUG nova.network.neutron [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] [instance: 66882603-6ff6-49c2-9f42-3521ecaf98b2] Updating instance_info_cache with network_info: [{"id": "3dc09d47-bf2c-4769-96df-8bc30f2bb658", "address": "fa:16:3e:56:e9:8b", "network": {"id": "45ff759a-f1c7-4241-a5ca-571f2d5ba6b7", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1755040165-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "af9e3552cc0543bf8dc5562e645493fc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69054a13-b7ef-44e1-bd3b-3ca5ba602848", "external-id": "nsx-vlan-transportzone-153", "segmentation_id": 153, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3dc09d47-bf", "ovs_interfaceid": "3dc09d47-bf2c-4769-96df-8bc30f2bb658", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 923.558660] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1d35545e-d84c-41b3-acf8-65a4c5576943 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Acquiring lock "e76bdb57-4a30-4222-ad4c-d95e4e6193cb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 923.558926] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1d35545e-d84c-41b3-acf8-65a4c5576943 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Lock "e76bdb57-4a30-4222-ad4c-d95e4e6193cb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 923.559153] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1d35545e-d84c-41b3-acf8-65a4c5576943 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Acquiring lock "e76bdb57-4a30-4222-ad4c-d95e4e6193cb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 923.559340] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1d35545e-d84c-41b3-acf8-65a4c5576943 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Lock "e76bdb57-4a30-4222-ad4c-d95e4e6193cb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 923.559506] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1d35545e-d84c-41b3-acf8-65a4c5576943 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Lock "e76bdb57-4a30-4222-ad4c-d95e4e6193cb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 923.564860] env[62627]: INFO nova.compute.manager [None req-1d35545e-d84c-41b3-acf8-65a4c5576943 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] Terminating instance [ 923.640922] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07d4edd4-cb71-4abc-9a73-3c29fc460051 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.648238] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c6c54cc-66da-4fcf-b161-4faf6254a356 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.679532] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27897899-0a6d-471d-a4b1-acd24da7e7bc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.687454] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6dbfa6a-a0d5-44c6-9f4c-e37df7631df1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.703871] env[62627]: DEBUG nova.compute.provider_tree [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 923.713404] env[62627]: DEBUG oslo_vmware.api [None req-bb3fe9f1-7a2b-4927-9609-df489b9d7bcf tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194526, 'name': RemoveSnapshot_Task, 'duration_secs': 0.644742} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.713830] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-bb3fe9f1-7a2b-4927-9609-df489b9d7bcf tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] Deleted Snapshot of the VM instance {{(pid=62627) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 923.714241] env[62627]: INFO nova.compute.manager [None req-bb3fe9f1-7a2b-4927-9609-df489b9d7bcf tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] Took 13.38 seconds to snapshot the instance on the hypervisor. [ 923.772926] env[62627]: DEBUG oslo_vmware.api [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194529, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068072} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.773216] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 923.773989] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-764289f6-f83a-40a3-ab9c-878c09db0083 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.795611] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Reconfiguring VM instance instance-00000033 to attach disk [datastore2] 457e5aa6-d229-4f48-8817-e918ec4bd0f9/457e5aa6-d229-4f48-8817-e918ec4bd0f9.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 923.795863] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-100b4458-3c95-427e-a073-10bc8ccdbdbc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.818047] env[62627]: DEBUG oslo_vmware.api [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for the task: (returnval){ [ 923.818047] env[62627]: value = "task-2194530" [ 923.818047] env[62627]: _type = "Task" [ 923.818047] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.826626] env[62627]: DEBUG oslo_vmware.api [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194530, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.886787] env[62627]: DEBUG oslo_vmware.api [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Task: {'id': task-2194528, 'name': PowerOnVM_Task, 'duration_secs': 0.733492} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.887231] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] [instance: 7804914c-7df8-45c2-a934-67aa73afb2e2] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 923.887231] env[62627]: DEBUG nova.compute.manager [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] [instance: 7804914c-7df8-45c2-a934-67aa73afb2e2] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 923.888048] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8873200-d256-40ba-bbc0-30efe247c9e4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.942089] env[62627]: DEBUG oslo_concurrency.lockutils [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Releasing lock "refresh_cache-66882603-6ff6-49c2-9f42-3521ecaf98b2" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 923.942089] env[62627]: DEBUG nova.compute.manager [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] [instance: 66882603-6ff6-49c2-9f42-3521ecaf98b2] Instance network_info: |[{"id": "3dc09d47-bf2c-4769-96df-8bc30f2bb658", "address": "fa:16:3e:56:e9:8b", "network": {"id": "45ff759a-f1c7-4241-a5ca-571f2d5ba6b7", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1755040165-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "af9e3552cc0543bf8dc5562e645493fc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69054a13-b7ef-44e1-bd3b-3ca5ba602848", "external-id": "nsx-vlan-transportzone-153", "segmentation_id": 153, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3dc09d47-bf", "ovs_interfaceid": "3dc09d47-bf2c-4769-96df-8bc30f2bb658", "qbh_params": null, "qbg_params": null, "active": true, 
"vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 923.943073] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] [instance: 66882603-6ff6-49c2-9f42-3521ecaf98b2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:56:e9:8b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69054a13-b7ef-44e1-bd3b-3ca5ba602848', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3dc09d47-bf2c-4769-96df-8bc30f2bb658', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 923.951439] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Creating folder: Project (af9e3552cc0543bf8dc5562e645493fc). Parent ref: group-v447541. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 923.952117] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-198bfcb6-25fa-4311-a38b-1f712f2d4d39 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.963183] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Created folder: Project (af9e3552cc0543bf8dc5562e645493fc) in parent group-v447541. [ 923.963482] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Creating folder: Instances. Parent ref: group-v447690. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 923.963785] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3eb38dc6-46b9-4190-9a8b-9d644a579018 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.973467] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Created folder: Instances in parent group-v447690. [ 923.973761] env[62627]: DEBUG oslo.service.loopingcall [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 923.973994] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 66882603-6ff6-49c2-9f42-3521ecaf98b2] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 923.974304] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-01e27103-e64d-4467-a841-e644cbb8b3c2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.996135] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 923.996135] env[62627]: value = "task-2194533" [ 923.996135] env[62627]: _type = "Task" [ 923.996135] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.004189] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194533, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.074859] env[62627]: DEBUG nova.compute.manager [None req-1d35545e-d84c-41b3-acf8-65a4c5576943 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] Start destroying the instance on the hypervisor. {{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 924.075167] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-1d35545e-d84c-41b3-acf8-65a4c5576943 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 924.076091] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cbe09a4-1eed-41e2-8d38-4af5a1f258bd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.084315] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-1d35545e-d84c-41b3-acf8-65a4c5576943 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 924.084664] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-caf1cd34-84d0-4168-a9b4-ef8df9f81dec {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.093713] env[62627]: DEBUG nova.compute.manager [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Start spawning the instance on the hypervisor. 
{{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 924.117153] env[62627]: DEBUG nova.virt.hardware [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 924.117898] env[62627]: DEBUG nova.virt.hardware [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 924.117898] env[62627]: DEBUG nova.virt.hardware [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 924.117898] env[62627]: DEBUG nova.virt.hardware [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 924.117898] env[62627]: DEBUG nova.virt.hardware [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 924.118093] env[62627]: DEBUG nova.virt.hardware [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 924.118735] env[62627]: DEBUG nova.virt.hardware [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 924.118735] env[62627]: DEBUG nova.virt.hardware [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 924.118735] env[62627]: DEBUG nova.virt.hardware [None 
req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 924.118735] env[62627]: DEBUG nova.virt.hardware [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 924.118945] env[62627]: DEBUG nova.virt.hardware [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 924.119801] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57dd38ee-eb47-4791-856a-e19bcbfa008b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.129143] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7558c1e7-dde7-4227-9c74-d35959641aad {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.153249] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-1d35545e-d84c-41b3-acf8-65a4c5576943 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 924.153541] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-1d35545e-d84c-41b3-acf8-65a4c5576943 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] Deleting contents of the VM from datastore datastore1 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 924.153657] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d35545e-d84c-41b3-acf8-65a4c5576943 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Deleting the datastore file [datastore1] e76bdb57-4a30-4222-ad4c-d95e4e6193cb {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 924.154080] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8b0200e0-0211-49b2-924b-c00dd2e0dff4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.162590] env[62627]: DEBUG oslo_vmware.api [None req-1d35545e-d84c-41b3-acf8-65a4c5576943 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the task: (returnval){ [ 924.162590] env[62627]: value = "task-2194535" [ 924.162590] env[62627]: _type = "Task" [ 924.162590] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.171839] env[62627]: DEBUG oslo_vmware.api [None req-1d35545e-d84c-41b3-acf8-65a4c5576943 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194535, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.207686] env[62627]: DEBUG nova.scheduler.client.report [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 924.218991] env[62627]: DEBUG nova.compute.manager [None req-bb3fe9f1-7a2b-4927-9609-df489b9d7bcf tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] Instance disappeared during snapshot {{(pid=62627) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4595}} [ 924.233845] env[62627]: DEBUG nova.compute.manager [None req-bb3fe9f1-7a2b-4927-9609-df489b9d7bcf tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Image not found during clean up 9a6a372d-1423-4d9e-9642-a4a246be11f7 {{(pid=62627) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4601}} [ 924.240046] env[62627]: DEBUG nova.compute.manager [req-c299122b-b5b9-4ee0-a76f-8966f8ba1168 req-00c9c825-46b7-4735-9a53-de40ad83102c service nova] [instance: 66882603-6ff6-49c2-9f42-3521ecaf98b2] Received event network-vif-plugged-3dc09d47-bf2c-4769-96df-8bc30f2bb658 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 924.240306] env[62627]: DEBUG oslo_concurrency.lockutils [req-c299122b-b5b9-4ee0-a76f-8966f8ba1168 req-00c9c825-46b7-4735-9a53-de40ad83102c service nova] Acquiring lock "66882603-6ff6-49c2-9f42-3521ecaf98b2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 924.240517] env[62627]: DEBUG oslo_concurrency.lockutils [req-c299122b-b5b9-4ee0-a76f-8966f8ba1168 req-00c9c825-46b7-4735-9a53-de40ad83102c service nova] Lock "66882603-6ff6-49c2-9f42-3521ecaf98b2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 924.240681] env[62627]: DEBUG oslo_concurrency.lockutils [req-c299122b-b5b9-4ee0-a76f-8966f8ba1168 req-00c9c825-46b7-4735-9a53-de40ad83102c service nova] Lock "66882603-6ff6-49c2-9f42-3521ecaf98b2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 924.240863] env[62627]: DEBUG nova.compute.manager [req-c299122b-b5b9-4ee0-a76f-8966f8ba1168 req-00c9c825-46b7-4735-9a53-de40ad83102c service nova] [instance: 66882603-6ff6-49c2-9f42-3521ecaf98b2] No waiting events found dispatching network-vif-plugged-3dc09d47-bf2c-4769-96df-8bc30f2bb658 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 924.241036] env[62627]: WARNING nova.compute.manager 
[req-c299122b-b5b9-4ee0-a76f-8966f8ba1168 req-00c9c825-46b7-4735-9a53-de40ad83102c service nova] [instance: 66882603-6ff6-49c2-9f42-3521ecaf98b2] Received unexpected event network-vif-plugged-3dc09d47-bf2c-4769-96df-8bc30f2bb658 for instance with vm_state building and task_state spawning. [ 924.241218] env[62627]: DEBUG nova.compute.manager [req-c299122b-b5b9-4ee0-a76f-8966f8ba1168 req-00c9c825-46b7-4735-9a53-de40ad83102c service nova] [instance: 66882603-6ff6-49c2-9f42-3521ecaf98b2] Received event network-changed-3dc09d47-bf2c-4769-96df-8bc30f2bb658 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 924.241350] env[62627]: DEBUG nova.compute.manager [req-c299122b-b5b9-4ee0-a76f-8966f8ba1168 req-00c9c825-46b7-4735-9a53-de40ad83102c service nova] [instance: 66882603-6ff6-49c2-9f42-3521ecaf98b2] Refreshing instance network info cache due to event network-changed-3dc09d47-bf2c-4769-96df-8bc30f2bb658. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 924.241537] env[62627]: DEBUG oslo_concurrency.lockutils [req-c299122b-b5b9-4ee0-a76f-8966f8ba1168 req-00c9c825-46b7-4735-9a53-de40ad83102c service nova] Acquiring lock "refresh_cache-66882603-6ff6-49c2-9f42-3521ecaf98b2" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 924.241679] env[62627]: DEBUG oslo_concurrency.lockutils [req-c299122b-b5b9-4ee0-a76f-8966f8ba1168 req-00c9c825-46b7-4735-9a53-de40ad83102c service nova] Acquired lock "refresh_cache-66882603-6ff6-49c2-9f42-3521ecaf98b2" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 924.241835] env[62627]: DEBUG nova.network.neutron [req-c299122b-b5b9-4ee0-a76f-8966f8ba1168 req-00c9c825-46b7-4735-9a53-de40ad83102c service nova] [instance: 66882603-6ff6-49c2-9f42-3521ecaf98b2] Refreshing network info cache for port 3dc09d47-bf2c-4769-96df-8bc30f2bb658 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 924.327492] env[62627]: DEBUG oslo_vmware.api [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194530, 'name': ReconfigVM_Task, 'duration_secs': 0.291011} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.328219] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Reconfigured VM instance instance-00000033 to attach disk [datastore2] 457e5aa6-d229-4f48-8817-e918ec4bd0f9/457e5aa6-d229-4f48-8817-e918ec4bd0f9.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 924.330916] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4ad16790-9786-4331-9b1f-2a3f351e1c88 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.335722] env[62627]: DEBUG oslo_concurrency.lockutils [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Acquiring lock "e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 924.335850] env[62627]: DEBUG oslo_concurrency.lockutils [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Lock "e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 924.341346] env[62627]: DEBUG oslo_vmware.api [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for the task: (returnval){ [ 924.341346] env[62627]: value = "task-2194536" [ 924.341346] env[62627]: _type = "Task" [ 924.341346] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.349640] env[62627]: DEBUG oslo_vmware.api [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194536, 'name': Rename_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.406657] env[62627]: DEBUG oslo_concurrency.lockutils [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 924.507373] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194533, 'name': CreateVM_Task, 'duration_secs': 0.377492} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.507597] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 66882603-6ff6-49c2-9f42-3521ecaf98b2] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 924.508292] env[62627]: DEBUG oslo_concurrency.lockutils [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 924.508455] env[62627]: DEBUG oslo_concurrency.lockutils [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 924.508797] env[62627]: DEBUG oslo_concurrency.lockutils [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 924.509057] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ddbe160f-1aee-464f-bd24-d9cc99b64f16 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.514740] env[62627]: DEBUG oslo_vmware.api [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Waiting for the task: (returnval){ [ 924.514740] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5254b5ef-6764-6577-174d-9af91d1e13c0" [ 924.514740] env[62627]: _type = "Task" [ 924.514740] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.523475] env[62627]: DEBUG oslo_vmware.api [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5254b5ef-6764-6577-174d-9af91d1e13c0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.649114] env[62627]: INFO nova.compute.manager [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Rebuilding instance [ 924.678510] env[62627]: DEBUG oslo_vmware.api [None req-1d35545e-d84c-41b3-acf8-65a4c5576943 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194535, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.202655} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.679041] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d35545e-d84c-41b3-acf8-65a4c5576943 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 924.679041] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-1d35545e-d84c-41b3-acf8-65a4c5576943 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] Deleted contents of the VM from datastore datastore1 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 924.679177] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-1d35545e-d84c-41b3-acf8-65a4c5576943 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 924.679326] env[62627]: INFO nova.compute.manager [None req-1d35545e-d84c-41b3-acf8-65a4c5576943 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] Took 0.60 seconds to destroy the instance on the hypervisor. [ 924.679744] env[62627]: DEBUG oslo.service.loopingcall [None req-1d35545e-d84c-41b3-acf8-65a4c5576943 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 924.679744] env[62627]: DEBUG nova.compute.manager [-] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 924.679832] env[62627]: DEBUG nova.network.neutron [-] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 924.691050] env[62627]: DEBUG nova.compute.manager [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 924.691873] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b83a075-05e8-407e-9aea-b4aa6f39be0a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.713727] env[62627]: DEBUG oslo_concurrency.lockutils [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.644s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 924.713833] env[62627]: DEBUG nova.compute.manager [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] [instance: 4e65e39e-3b0b-4c71-87d7-e1ed663ada4a] Start building networks 
asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 924.718634] env[62627]: DEBUG oslo_concurrency.lockutils [None req-fbd4e3fb-0e1c-412d-9116-6459271003c6 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.793s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 924.718634] env[62627]: DEBUG nova.objects.instance [None req-fbd4e3fb-0e1c-412d-9116-6459271003c6 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Lazy-loading 'resources' on Instance uuid 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 924.763644] env[62627]: DEBUG oslo_concurrency.lockutils [None req-896b61e2-43d3-4c8e-b0d1-2bea7e25f400 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Acquiring lock "7804914c-7df8-45c2-a934-67aa73afb2e2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 924.763883] env[62627]: DEBUG oslo_concurrency.lockutils [None req-896b61e2-43d3-4c8e-b0d1-2bea7e25f400 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Lock "7804914c-7df8-45c2-a934-67aa73afb2e2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 924.764204] env[62627]: DEBUG oslo_concurrency.lockutils [None req-896b61e2-43d3-4c8e-b0d1-2bea7e25f400 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Acquiring lock "7804914c-7df8-45c2-a934-67aa73afb2e2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 924.764389] env[62627]: DEBUG oslo_concurrency.lockutils [None req-896b61e2-43d3-4c8e-b0d1-2bea7e25f400 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Lock "7804914c-7df8-45c2-a934-67aa73afb2e2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 924.764620] env[62627]: DEBUG oslo_concurrency.lockutils [None req-896b61e2-43d3-4c8e-b0d1-2bea7e25f400 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Lock "7804914c-7df8-45c2-a934-67aa73afb2e2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 924.769080] env[62627]: INFO nova.compute.manager [None req-896b61e2-43d3-4c8e-b0d1-2bea7e25f400 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] [instance: 7804914c-7df8-45c2-a934-67aa73afb2e2] Terminating instance [ 924.851836] env[62627]: DEBUG oslo_vmware.api [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 
tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194536, 'name': Rename_Task, 'duration_secs': 0.15952} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.852298] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 924.852584] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a2441b28-a1b1-4213-9137-c76e600b3ea2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.860130] env[62627]: DEBUG oslo_vmware.api [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for the task: (returnval){ [ 924.860130] env[62627]: value = "task-2194538" [ 924.860130] env[62627]: _type = "Task" [ 924.860130] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.871443] env[62627]: DEBUG oslo_vmware.api [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194538, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.030217] env[62627]: DEBUG oslo_vmware.api [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5254b5ef-6764-6577-174d-9af91d1e13c0, 'name': SearchDatastore_Task, 'duration_secs': 0.013041} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.030217] env[62627]: DEBUG oslo_concurrency.lockutils [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 925.030217] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] [instance: 66882603-6ff6-49c2-9f42-3521ecaf98b2] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 925.030582] env[62627]: DEBUG oslo_concurrency.lockutils [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 925.030582] env[62627]: DEBUG oslo_concurrency.lockutils [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 925.030648] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 925.031194] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b4f2f188-1e40-4f39-9f88-ef03094ba428 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.039675] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 925.039871] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 925.040664] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f53185da-4e3e-43b5-a9a7-f9fa9fb40f92 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.046172] env[62627]: DEBUG nova.compute.manager [req-f354bfc7-3ccc-4efc-8717-4eeaddb42650 req-84e114d0-eacd-442f-ad97-3e7c93c1ad9a service nova] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] Received event network-vif-deleted-993b71f3-f13a-4db1-b2b7-2da39bd4d41e {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 925.046420] env[62627]: INFO nova.compute.manager [req-f354bfc7-3ccc-4efc-8717-4eeaddb42650 req-84e114d0-eacd-442f-ad97-3e7c93c1ad9a service nova] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] Neutron deleted interface 993b71f3-f13a-4db1-b2b7-2da39bd4d41e; detaching it from the instance and deleting it from the info cache [ 925.046595] env[62627]: DEBUG nova.network.neutron [req-f354bfc7-3ccc-4efc-8717-4eeaddb42650 req-84e114d0-eacd-442f-ad97-3e7c93c1ad9a service nova] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 925.051293] env[62627]: DEBUG oslo_vmware.api [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Waiting for the task: (returnval){ [ 925.051293] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5289cfb6-8eb2-87d1-b220-a5c7250edf8f" [ 925.051293] env[62627]: _type = "Task" [ 925.051293] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.068226] env[62627]: DEBUG oslo_vmware.api [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5289cfb6-8eb2-87d1-b220-a5c7250edf8f, 'name': SearchDatastore_Task, 'duration_secs': 0.010996} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.068226] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-775a9808-ffa2-436e-924e-36551e8372b1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.071918] env[62627]: DEBUG oslo_vmware.api [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Waiting for the task: (returnval){ [ 925.071918] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]525374df-4e90-801e-d2c3-8899e0cf082a" [ 925.071918] env[62627]: _type = "Task" [ 925.071918] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.074118] env[62627]: DEBUG nova.network.neutron [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Successfully updated port: e7de9fde-1309-4a6a-bbb5-537333d9e42c {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 925.084666] env[62627]: DEBUG oslo_vmware.api [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]525374df-4e90-801e-d2c3-8899e0cf082a, 'name': SearchDatastore_Task, 'duration_secs': 0.008487} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.087024] env[62627]: DEBUG oslo_concurrency.lockutils [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 925.087274] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 66882603-6ff6-49c2-9f42-3521ecaf98b2/66882603-6ff6-49c2-9f42-3521ecaf98b2.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 925.089046] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-13a67db3-af15-4f3c-acd0-ccebb7973d6a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.097030] env[62627]: DEBUG oslo_vmware.api [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Waiting for the task: (returnval){ [ 925.097030] env[62627]: value = "task-2194539" [ 925.097030] env[62627]: _type = "Task" [ 925.097030] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.105893] env[62627]: DEBUG oslo_vmware.api [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Task: {'id': task-2194539, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.146464] env[62627]: DEBUG nova.network.neutron [req-c299122b-b5b9-4ee0-a76f-8966f8ba1168 req-00c9c825-46b7-4735-9a53-de40ad83102c service nova] [instance: 66882603-6ff6-49c2-9f42-3521ecaf98b2] Updated VIF entry in instance network info cache for port 3dc09d47-bf2c-4769-96df-8bc30f2bb658. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 925.146902] env[62627]: DEBUG nova.network.neutron [req-c299122b-b5b9-4ee0-a76f-8966f8ba1168 req-00c9c825-46b7-4735-9a53-de40ad83102c service nova] [instance: 66882603-6ff6-49c2-9f42-3521ecaf98b2] Updating instance_info_cache with network_info: [{"id": "3dc09d47-bf2c-4769-96df-8bc30f2bb658", "address": "fa:16:3e:56:e9:8b", "network": {"id": "45ff759a-f1c7-4241-a5ca-571f2d5ba6b7", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1755040165-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "af9e3552cc0543bf8dc5562e645493fc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69054a13-b7ef-44e1-bd3b-3ca5ba602848", "external-id": "nsx-vlan-transportzone-153", "segmentation_id": 153, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3dc09d47-bf", "ovs_interfaceid": "3dc09d47-bf2c-4769-96df-8bc30f2bb658", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 925.223136] env[62627]: DEBUG nova.compute.utils [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 925.227327] env[62627]: DEBUG nova.compute.manager [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] [instance: 4e65e39e-3b0b-4c71-87d7-e1ed663ada4a] Allocating IP information in the background. 
{{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 925.227466] env[62627]: DEBUG nova.network.neutron [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] [instance: 4e65e39e-3b0b-4c71-87d7-e1ed663ada4a] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 925.274695] env[62627]: DEBUG oslo_concurrency.lockutils [None req-896b61e2-43d3-4c8e-b0d1-2bea7e25f400 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Acquiring lock "refresh_cache-7804914c-7df8-45c2-a934-67aa73afb2e2" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 925.274695] env[62627]: DEBUG oslo_concurrency.lockutils [None req-896b61e2-43d3-4c8e-b0d1-2bea7e25f400 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Acquired lock "refresh_cache-7804914c-7df8-45c2-a934-67aa73afb2e2" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 925.274695] env[62627]: DEBUG nova.network.neutron [None req-896b61e2-43d3-4c8e-b0d1-2bea7e25f400 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] [instance: 7804914c-7df8-45c2-a934-67aa73afb2e2] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 925.301786] env[62627]: DEBUG nova.policy [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8234eaf71dd04835a2e03794caf95aaa', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ddc2513b33ec44b995cbd09fdf957495', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 925.370954] env[62627]: DEBUG oslo_vmware.api [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194538, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.500266] env[62627]: DEBUG nova.network.neutron [-] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 925.549675] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9f0bae4d-e89a-4bd4-a074-de40d29e44a3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.560395] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20facb97-e0b8-48b5-a2cf-40663de869fe {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.579105] env[62627]: DEBUG oslo_concurrency.lockutils [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Acquiring lock "refresh_cache-38ab16cc-be4f-47b4-a818-ea864e853872" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 925.579474] env[62627]: DEBUG oslo_concurrency.lockutils [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Acquired lock "refresh_cache-38ab16cc-be4f-47b4-a818-ea864e853872" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 925.579474] env[62627]: DEBUG nova.network.neutron [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 925.604259] env[62627]: DEBUG nova.compute.manager [req-f354bfc7-3ccc-4efc-8717-4eeaddb42650 req-84e114d0-eacd-442f-ad97-3e7c93c1ad9a service nova] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] Detach interface failed, port_id=993b71f3-f13a-4db1-b2b7-2da39bd4d41e, reason: Instance e76bdb57-4a30-4222-ad4c-d95e4e6193cb could not be found. {{(pid=62627) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 925.618453] env[62627]: DEBUG oslo_vmware.api [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Task: {'id': task-2194539, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.652872] env[62627]: DEBUG oslo_concurrency.lockutils [req-c299122b-b5b9-4ee0-a76f-8966f8ba1168 req-00c9c825-46b7-4735-9a53-de40ad83102c service nova] Releasing lock "refresh_cache-66882603-6ff6-49c2-9f42-3521ecaf98b2" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 925.704809] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 925.705230] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9d62b515-04cb-42cc-91d2-ce9bb69c97af {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.712254] env[62627]: DEBUG oslo_vmware.api [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for the task: (returnval){ [ 925.712254] env[62627]: value = "task-2194540" [ 925.712254] env[62627]: _type = "Task" [ 925.712254] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.723506] env[62627]: DEBUG oslo_vmware.api [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194540, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.728217] env[62627]: DEBUG nova.compute.manager [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] [instance: 4e65e39e-3b0b-4c71-87d7-e1ed663ada4a] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 925.804964] env[62627]: DEBUG nova.network.neutron [None req-896b61e2-43d3-4c8e-b0d1-2bea7e25f400 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] [instance: 7804914c-7df8-45c2-a934-67aa73afb2e2] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 925.867195] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7a9e7ae-a9ea-4f5d-82f6-8eb0bde93789 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.873045] env[62627]: DEBUG oslo_vmware.api [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194538, 'name': PowerOnVM_Task, 'duration_secs': 0.544449} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.873670] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 925.873879] env[62627]: INFO nova.compute.manager [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Took 7.18 seconds to spawn the instance on the hypervisor. [ 925.874066] env[62627]: DEBUG nova.compute.manager [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 925.874913] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5de03d36-60b1-4f4b-8193-2525b41fe2c5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.881213] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3624557e-bad7-4920-aebf-8045284285a7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.930350] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-869abbd8-9216-4001-a100-4429e22bbb94 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.939029] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe95bf9c-668c-4a5c-af04-446466b2fcad {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.955439] env[62627]: DEBUG nova.compute.provider_tree [None req-fbd4e3fb-0e1c-412d-9116-6459271003c6 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 926.003592] env[62627]: INFO nova.compute.manager [-] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] Took 1.32 seconds to deallocate network for instance. [ 926.120849] env[62627]: DEBUG oslo_vmware.api [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Task: {'id': task-2194539, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.655591} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.121431] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 66882603-6ff6-49c2-9f42-3521ecaf98b2/66882603-6ff6-49c2-9f42-3521ecaf98b2.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 926.121827] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] [instance: 66882603-6ff6-49c2-9f42-3521ecaf98b2] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 926.122208] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8f9b2eed-2d9e-4bb5-8750-015c07f6c1eb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.130607] env[62627]: DEBUG oslo_vmware.api [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Waiting for the task: (returnval){ [ 926.130607] env[62627]: value = "task-2194541" [ 926.130607] env[62627]: _type = "Task" [ 926.130607] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.138694] env[62627]: DEBUG nova.network.neutron [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 926.142579] env[62627]: DEBUG nova.network.neutron [None req-896b61e2-43d3-4c8e-b0d1-2bea7e25f400 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] [instance: 7804914c-7df8-45c2-a934-67aa73afb2e2] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 926.149212] env[62627]: DEBUG oslo_vmware.api [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Task: {'id': task-2194541, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.204075] env[62627]: DEBUG nova.network.neutron [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] [instance: 4e65e39e-3b0b-4c71-87d7-e1ed663ada4a] Successfully created port: 7e5db0e3-f97d-48f9-89bd-9d1b24ac6d14 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 926.225664] env[62627]: DEBUG oslo_vmware.api [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194540, 'name': PowerOffVM_Task, 'duration_secs': 0.50456} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.225987] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 926.226261] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 926.228023] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a93ac29-f850-4c52-9a65-3e611dc2dbeb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.244923] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 926.245243] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-66a76e08-be89-4c1d-b7bd-5c4aa4c75827 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.312749] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 926.313059] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 926.313253] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Deleting the datastore file [datastore2] 7366e4b0-bdba-435d-844d-8e81c4464b31 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 926.318418] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ff144073-161d-4169-8253-ce4cdb8f5327 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.324163] env[62627]: DEBUG oslo_vmware.api [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for the task: (returnval){ [ 926.324163] env[62627]: value = "task-2194543" [ 926.324163] env[62627]: _type = "Task" [ 926.324163] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.334489] env[62627]: DEBUG oslo_vmware.api [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194543, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.338218] env[62627]: DEBUG nova.compute.manager [req-d223cc60-0b48-4f37-8f28-639c5428894a req-d01475f2-ae96-4e58-bd49-fd6d3a9515ab service nova] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Received event network-vif-plugged-e7de9fde-1309-4a6a-bbb5-537333d9e42c {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 926.338218] env[62627]: DEBUG oslo_concurrency.lockutils [req-d223cc60-0b48-4f37-8f28-639c5428894a req-d01475f2-ae96-4e58-bd49-fd6d3a9515ab service nova] Acquiring lock "38ab16cc-be4f-47b4-a818-ea864e853872-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 926.338218] env[62627]: DEBUG oslo_concurrency.lockutils [req-d223cc60-0b48-4f37-8f28-639c5428894a req-d01475f2-ae96-4e58-bd49-fd6d3a9515ab service nova] Lock "38ab16cc-be4f-47b4-a818-ea864e853872-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 926.338218] env[62627]: DEBUG oslo_concurrency.lockutils [req-d223cc60-0b48-4f37-8f28-639c5428894a req-d01475f2-ae96-4e58-bd49-fd6d3a9515ab service nova] Lock "38ab16cc-be4f-47b4-a818-ea864e853872-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 926.338616] env[62627]: DEBUG nova.compute.manager [req-d223cc60-0b48-4f37-8f28-639c5428894a req-d01475f2-ae96-4e58-bd49-fd6d3a9515ab service nova] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] No waiting events found dispatching network-vif-plugged-e7de9fde-1309-4a6a-bbb5-537333d9e42c {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 926.338800] env[62627]: WARNING nova.compute.manager [req-d223cc60-0b48-4f37-8f28-639c5428894a req-d01475f2-ae96-4e58-bd49-fd6d3a9515ab service nova] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Received unexpected event network-vif-plugged-e7de9fde-1309-4a6a-bbb5-537333d9e42c for instance with vm_state building and task_state spawning. [ 926.339116] env[62627]: DEBUG nova.compute.manager [req-d223cc60-0b48-4f37-8f28-639c5428894a req-d01475f2-ae96-4e58-bd49-fd6d3a9515ab service nova] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Received event network-changed-e7de9fde-1309-4a6a-bbb5-537333d9e42c {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 926.339220] env[62627]: DEBUG nova.compute.manager [req-d223cc60-0b48-4f37-8f28-639c5428894a req-d01475f2-ae96-4e58-bd49-fd6d3a9515ab service nova] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Refreshing instance network info cache due to event network-changed-e7de9fde-1309-4a6a-bbb5-537333d9e42c. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 926.339459] env[62627]: DEBUG oslo_concurrency.lockutils [req-d223cc60-0b48-4f37-8f28-639c5428894a req-d01475f2-ae96-4e58-bd49-fd6d3a9515ab service nova] Acquiring lock "refresh_cache-38ab16cc-be4f-47b4-a818-ea864e853872" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 926.364091] env[62627]: DEBUG nova.network.neutron [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Updating instance_info_cache with network_info: [{"id": "e7de9fde-1309-4a6a-bbb5-537333d9e42c", "address": "fa:16:3e:e9:d1:53", "network": {"id": "6868e7f7-6e86-4ef3-a3f2-82b652203315", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-729196523-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "6ad7b328aba44b51aec9455bada6678c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape7de9fde-13", "ovs_interfaceid": "e7de9fde-1309-4a6a-bbb5-537333d9e42c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 926.406032] env[62627]: INFO nova.compute.manager [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Took 35.88 seconds to build instance. 
[ 926.459799] env[62627]: DEBUG nova.scheduler.client.report [None req-fbd4e3fb-0e1c-412d-9116-6459271003c6 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 926.510825] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1d35545e-d84c-41b3-acf8-65a4c5576943 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 926.639904] env[62627]: DEBUG oslo_vmware.api [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Task: {'id': task-2194541, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069599} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.640183] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] [instance: 66882603-6ff6-49c2-9f42-3521ecaf98b2] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 926.640996] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2216675f-e0b6-4fbd-ac43-2e1af614cf2d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.655610] env[62627]: DEBUG oslo_concurrency.lockutils [None req-896b61e2-43d3-4c8e-b0d1-2bea7e25f400 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Releasing lock "refresh_cache-7804914c-7df8-45c2-a934-67aa73afb2e2" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 926.656036] env[62627]: DEBUG nova.compute.manager [None req-896b61e2-43d3-4c8e-b0d1-2bea7e25f400 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] [instance: 7804914c-7df8-45c2-a934-67aa73afb2e2] Start destroying the instance on the hypervisor. 
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 926.656232] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-896b61e2-43d3-4c8e-b0d1-2bea7e25f400 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] [instance: 7804914c-7df8-45c2-a934-67aa73afb2e2] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 926.665729] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] [instance: 66882603-6ff6-49c2-9f42-3521ecaf98b2] Reconfiguring VM instance instance-00000034 to attach disk [datastore2] 66882603-6ff6-49c2-9f42-3521ecaf98b2/66882603-6ff6-49c2-9f42-3521ecaf98b2.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 926.665910] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cac7dffe-6e0e-4e3b-a43a-a4fc7f57fe3b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.668705] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2d0a2e2b-63d2-4418-a1db-0dce9b1180e1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.688854] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-896b61e2-43d3-4c8e-b0d1-2bea7e25f400 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] [instance: 7804914c-7df8-45c2-a934-67aa73afb2e2] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 926.689915] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bcf06b9f-50f4-4c36-96f2-6b1e84428c68 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.691772] env[62627]: DEBUG oslo_vmware.api [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Waiting for the task: (returnval){ [ 926.691772] env[62627]: value = "task-2194544" [ 926.691772] env[62627]: _type = "Task" [ 926.691772] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.697929] env[62627]: DEBUG oslo_vmware.api [None req-896b61e2-43d3-4c8e-b0d1-2bea7e25f400 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Waiting for the task: (returnval){ [ 926.697929] env[62627]: value = "task-2194545" [ 926.697929] env[62627]: _type = "Task" [ 926.697929] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.703562] env[62627]: DEBUG oslo_vmware.api [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Task: {'id': task-2194544, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.712156] env[62627]: DEBUG oslo_vmware.api [None req-896b61e2-43d3-4c8e-b0d1-2bea7e25f400 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Task: {'id': task-2194545, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.741544] env[62627]: DEBUG nova.compute.manager [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] [instance: 4e65e39e-3b0b-4c71-87d7-e1ed663ada4a] Start spawning the instance on the hypervisor. {{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 926.768062] env[62627]: DEBUG nova.virt.hardware [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 926.768062] env[62627]: DEBUG nova.virt.hardware [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 926.768347] env[62627]: DEBUG nova.virt.hardware [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 926.768519] env[62627]: DEBUG nova.virt.hardware [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 926.768713] env[62627]: DEBUG nova.virt.hardware [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 926.768888] env[62627]: DEBUG nova.virt.hardware [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 926.769152] env[62627]: DEBUG nova.virt.hardware [None 
req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 926.769351] env[62627]: DEBUG nova.virt.hardware [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 926.769529] env[62627]: DEBUG nova.virt.hardware [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 926.769711] env[62627]: DEBUG nova.virt.hardware [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 926.769899] env[62627]: DEBUG nova.virt.hardware [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 926.771071] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c44fa31a-72c7-4dfb-910f-f92c45a8fc1d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.780037] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0abd789-2292-46ec-813c-b44a4cb89a0b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.837632] env[62627]: DEBUG oslo_vmware.api [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194543, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.867127] env[62627]: DEBUG oslo_concurrency.lockutils [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Releasing lock "refresh_cache-38ab16cc-be4f-47b4-a818-ea864e853872" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 926.867487] env[62627]: DEBUG nova.compute.manager [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Instance network_info: |[{"id": "e7de9fde-1309-4a6a-bbb5-537333d9e42c", "address": "fa:16:3e:e9:d1:53", "network": {"id": "6868e7f7-6e86-4ef3-a3f2-82b652203315", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-729196523-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "6ad7b328aba44b51aec9455bada6678c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape7de9fde-13", "ovs_interfaceid": "e7de9fde-1309-4a6a-bbb5-537333d9e42c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 926.873483] env[62627]: DEBUG oslo_concurrency.lockutils [req-d223cc60-0b48-4f37-8f28-639c5428894a req-d01475f2-ae96-4e58-bd49-fd6d3a9515ab service nova] Acquired lock "refresh_cache-38ab16cc-be4f-47b4-a818-ea864e853872" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 926.873483] env[62627]: DEBUG nova.network.neutron [req-d223cc60-0b48-4f37-8f28-639c5428894a req-d01475f2-ae96-4e58-bd49-fd6d3a9515ab service nova] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Refreshing network info cache for port e7de9fde-1309-4a6a-bbb5-537333d9e42c {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 926.873483] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e9:d1:53', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ded18042-834c-4792-b3e8-b1c377446432', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e7de9fde-1309-4a6a-bbb5-537333d9e42c', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 926.877746] env[62627]: DEBUG oslo.service.loopingcall [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Waiting for function 
nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 926.879347] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 926.879347] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1cda5423-7957-4080-8927-0c55c4d7a82b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.901687] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 926.901687] env[62627]: value = "task-2194546" [ 926.901687] env[62627]: _type = "Task" [ 926.901687] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.913025] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d1c57c5a-4b95-4f26-9738-2cd863406721 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Lock "457e5aa6-d229-4f48-8817-e918ec4bd0f9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 54.968s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 926.913025] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194546, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.967408] env[62627]: DEBUG oslo_concurrency.lockutils [None req-fbd4e3fb-0e1c-412d-9116-6459271003c6 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.249s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 926.970263] env[62627]: DEBUG oslo_concurrency.lockutils [None req-906c8ed3-1050-42a5-8029-69cfd3c9d205 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.026s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 926.970588] env[62627]: DEBUG nova.objects.instance [None req-906c8ed3-1050-42a5-8029-69cfd3c9d205 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Lazy-loading 'resources' on Instance uuid 02216be1-cdb9-421a-8a23-4a919b376595 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 926.991102] env[62627]: INFO nova.scheduler.client.report [None req-fbd4e3fb-0e1c-412d-9116-6459271003c6 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Deleted allocations for instance 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8 [ 927.112017] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ef77a08-4245-44fc-9fd4-427e567fe463 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] Volume attach. 
Driver type: vmdk {{(pid=62627) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 927.112280] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ef77a08-4245-44fc-9fd4-427e567fe463 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447689', 'volume_id': 'd70a21ef-38af-445d-bae6-087172ff59dc', 'name': 'volume-d70a21ef-38af-445d-bae6-087172ff59dc', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd0786233-087b-4000-8c37-fd5c55cc0ef4', 'attached_at': '', 'detached_at': '', 'volume_id': 'd70a21ef-38af-445d-bae6-087172ff59dc', 'serial': 'd70a21ef-38af-445d-bae6-087172ff59dc'} {{(pid=62627) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 927.113333] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31977d87-eb54-4218-aabe-27091f782cd2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.131268] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fc11efd-717e-4b29-80de-5711c147602e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.155370] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ef77a08-4245-44fc-9fd4-427e567fe463 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] Reconfiguring VM instance instance-00000032 to attach disk [datastore2] volume-d70a21ef-38af-445d-bae6-087172ff59dc/volume-d70a21ef-38af-445d-bae6-087172ff59dc.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 927.156153] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-93c64f75-31fb-4e85-8c31-3ae26a3304de {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.174057] env[62627]: DEBUG oslo_vmware.api [None req-3ef77a08-4245-44fc-9fd4-427e567fe463 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 927.174057] env[62627]: value = "task-2194547" [ 927.174057] env[62627]: _type = "Task" [ 927.174057] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.183789] env[62627]: DEBUG oslo_vmware.api [None req-3ef77a08-4245-44fc-9fd4-427e567fe463 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194547, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.201281] env[62627]: DEBUG oslo_vmware.api [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Task: {'id': task-2194544, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.211589] env[62627]: DEBUG oslo_vmware.api [None req-896b61e2-43d3-4c8e-b0d1-2bea7e25f400 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Task: {'id': task-2194545, 'name': PowerOffVM_Task, 'duration_secs': 0.188571} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.211832] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-896b61e2-43d3-4c8e-b0d1-2bea7e25f400 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] [instance: 7804914c-7df8-45c2-a934-67aa73afb2e2] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 927.212009] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-896b61e2-43d3-4c8e-b0d1-2bea7e25f400 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] [instance: 7804914c-7df8-45c2-a934-67aa73afb2e2] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 927.212487] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2be35dc2-1414-493e-8d62-b22be8750b0c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.237590] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-896b61e2-43d3-4c8e-b0d1-2bea7e25f400 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] [instance: 7804914c-7df8-45c2-a934-67aa73afb2e2] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 927.237590] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-896b61e2-43d3-4c8e-b0d1-2bea7e25f400 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] [instance: 7804914c-7df8-45c2-a934-67aa73afb2e2] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 927.237824] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-896b61e2-43d3-4c8e-b0d1-2bea7e25f400 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Deleting the datastore file [datastore2] 7804914c-7df8-45c2-a934-67aa73afb2e2 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 927.241035] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9b0ba2cb-575f-4009-84ec-f8d0d2517404 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.246354] env[62627]: DEBUG oslo_vmware.api [None req-896b61e2-43d3-4c8e-b0d1-2bea7e25f400 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Waiting for the task: (returnval){ [ 927.246354] env[62627]: value = "task-2194549" [ 927.246354] env[62627]: _type = "Task" [ 927.246354] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.254181] env[62627]: DEBUG oslo_vmware.api [None req-896b61e2-43d3-4c8e-b0d1-2bea7e25f400 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Task: {'id': task-2194549, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.334937] env[62627]: DEBUG oslo_vmware.api [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194543, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.534343} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.335217] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 927.335403] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 927.335584] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 927.411513] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194546, 'name': CreateVM_Task, 'duration_secs': 0.494608} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.411690] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 927.412392] env[62627]: DEBUG oslo_concurrency.lockutils [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 927.412559] env[62627]: DEBUG oslo_concurrency.lockutils [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 927.412950] env[62627]: DEBUG oslo_concurrency.lockutils [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 927.413232] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8b64c9df-4070-4f30-b311-c8d0486c44fd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.415190] env[62627]: DEBUG nova.compute.manager [None 
req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: f6357f01-d7cb-4477-be2d-98fc17d7a940] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 927.421270] env[62627]: DEBUG oslo_vmware.api [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Waiting for the task: (returnval){ [ 927.421270] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52712b87-d55e-a73c-6bbb-402bd6569c9b" [ 927.421270] env[62627]: _type = "Task" [ 927.421270] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.429664] env[62627]: DEBUG oslo_vmware.api [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52712b87-d55e-a73c-6bbb-402bd6569c9b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.502235] env[62627]: DEBUG oslo_concurrency.lockutils [None req-fbd4e3fb-0e1c-412d-9116-6459271003c6 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Lock "4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.671s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 927.684385] env[62627]: DEBUG nova.compute.manager [req-aff48b16-bca1-4e75-bdf2-2c6bb2b370b2 req-9da3ec19-5029-47c1-b5f9-ee5823557b67 service nova] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Received event network-changed-62b171c6-c991-4816-86e3-ba8a0862f911 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 927.684676] env[62627]: DEBUG nova.compute.manager [req-aff48b16-bca1-4e75-bdf2-2c6bb2b370b2 req-9da3ec19-5029-47c1-b5f9-ee5823557b67 service nova] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Refreshing instance network info cache due to event network-changed-62b171c6-c991-4816-86e3-ba8a0862f911. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 927.684837] env[62627]: DEBUG oslo_concurrency.lockutils [req-aff48b16-bca1-4e75-bdf2-2c6bb2b370b2 req-9da3ec19-5029-47c1-b5f9-ee5823557b67 service nova] Acquiring lock "refresh_cache-457e5aa6-d229-4f48-8817-e918ec4bd0f9" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 927.684982] env[62627]: DEBUG oslo_concurrency.lockutils [req-aff48b16-bca1-4e75-bdf2-2c6bb2b370b2 req-9da3ec19-5029-47c1-b5f9-ee5823557b67 service nova] Acquired lock "refresh_cache-457e5aa6-d229-4f48-8817-e918ec4bd0f9" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 927.685159] env[62627]: DEBUG nova.network.neutron [req-aff48b16-bca1-4e75-bdf2-2c6bb2b370b2 req-9da3ec19-5029-47c1-b5f9-ee5823557b67 service nova] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Refreshing network info cache for port 62b171c6-c991-4816-86e3-ba8a0862f911 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 927.692753] env[62627]: DEBUG oslo_vmware.api [None req-3ef77a08-4245-44fc-9fd4-427e567fe463 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194547, 'name': ReconfigVM_Task, 'duration_secs': 0.384264} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.697275] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ef77a08-4245-44fc-9fd4-427e567fe463 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] Reconfigured VM instance instance-00000032 to attach disk [datastore2] volume-d70a21ef-38af-445d-bae6-087172ff59dc/volume-d70a21ef-38af-445d-bae6-087172ff59dc.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 927.706044] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7dfd2e6a-2292-46b1-a92b-b12f4b7636b5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.725838] env[62627]: DEBUG oslo_vmware.api [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Task: {'id': task-2194544, 'name': ReconfigVM_Task, 'duration_secs': 0.684208} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.727420] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] [instance: 66882603-6ff6-49c2-9f42-3521ecaf98b2] Reconfigured VM instance instance-00000034 to attach disk [datastore2] 66882603-6ff6-49c2-9f42-3521ecaf98b2/66882603-6ff6-49c2-9f42-3521ecaf98b2.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 927.728380] env[62627]: DEBUG oslo_vmware.api [None req-3ef77a08-4245-44fc-9fd4-427e567fe463 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 927.728380] env[62627]: value = "task-2194550" [ 927.728380] env[62627]: _type = "Task" [ 927.728380] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.728811] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-11eb11d3-5c2a-4388-9679-817f4e35df41 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.740891] env[62627]: DEBUG oslo_vmware.api [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Waiting for the task: (returnval){ [ 927.740891] env[62627]: value = "task-2194551" [ 927.740891] env[62627]: _type = "Task" [ 927.740891] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.744304] env[62627]: DEBUG oslo_vmware.api [None req-3ef77a08-4245-44fc-9fd4-427e567fe463 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194550, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.756533] env[62627]: DEBUG oslo_vmware.api [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Task: {'id': task-2194551, 'name': Rename_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.761896] env[62627]: DEBUG oslo_vmware.api [None req-896b61e2-43d3-4c8e-b0d1-2bea7e25f400 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Task: {'id': task-2194549, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.11635} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.763473] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-896b61e2-43d3-4c8e-b0d1-2bea7e25f400 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 927.763473] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-896b61e2-43d3-4c8e-b0d1-2bea7e25f400 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] [instance: 7804914c-7df8-45c2-a934-67aa73afb2e2] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 927.763473] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-896b61e2-43d3-4c8e-b0d1-2bea7e25f400 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] [instance: 7804914c-7df8-45c2-a934-67aa73afb2e2] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 927.763473] env[62627]: INFO nova.compute.manager [None req-896b61e2-43d3-4c8e-b0d1-2bea7e25f400 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] [instance: 7804914c-7df8-45c2-a934-67aa73afb2e2] Took 1.11 seconds to destroy the instance on the hypervisor. [ 927.763473] env[62627]: DEBUG oslo.service.loopingcall [None req-896b61e2-43d3-4c8e-b0d1-2bea7e25f400 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 927.763473] env[62627]: DEBUG nova.compute.manager [-] [instance: 7804914c-7df8-45c2-a934-67aa73afb2e2] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 927.763473] env[62627]: DEBUG nova.network.neutron [-] [instance: 7804914c-7df8-45c2-a934-67aa73afb2e2] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 927.792535] env[62627]: DEBUG nova.network.neutron [-] [instance: 7804914c-7df8-45c2-a934-67aa73afb2e2] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 927.863706] env[62627]: DEBUG nova.network.neutron [req-d223cc60-0b48-4f37-8f28-639c5428894a req-d01475f2-ae96-4e58-bd49-fd6d3a9515ab service nova] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Updated VIF entry in instance network info cache for port e7de9fde-1309-4a6a-bbb5-537333d9e42c. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 927.864252] env[62627]: DEBUG nova.network.neutron [req-d223cc60-0b48-4f37-8f28-639c5428894a req-d01475f2-ae96-4e58-bd49-fd6d3a9515ab service nova] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Updating instance_info_cache with network_info: [{"id": "e7de9fde-1309-4a6a-bbb5-537333d9e42c", "address": "fa:16:3e:e9:d1:53", "network": {"id": "6868e7f7-6e86-4ef3-a3f2-82b652203315", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-729196523-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "6ad7b328aba44b51aec9455bada6678c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape7de9fde-13", "ovs_interfaceid": "e7de9fde-1309-4a6a-bbb5-537333d9e42c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 927.938799] env[62627]: DEBUG oslo_vmware.api [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52712b87-d55e-a73c-6bbb-402bd6569c9b, 'name': SearchDatastore_Task, 'duration_secs': 0.017037} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.939590] env[62627]: DEBUG oslo_concurrency.lockutils [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 927.939590] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 927.939590] env[62627]: DEBUG oslo_concurrency.lockutils [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 927.939811] env[62627]: DEBUG oslo_concurrency.lockutils [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 927.939937] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 927.940215] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bbfe7aec-881c-480a-8510-790c6949eb6e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.945757] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 927.952520] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 927.953643] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 927.953643] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d351eb75-9032-434a-a400-47827b9cb83d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.964446] env[62627]: DEBUG oslo_vmware.api [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Waiting for the task: (returnval){ [ 927.964446] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52e32613-ddc2-b67d-9193-bd5474bf7f3f" [ 927.964446] env[62627]: _type = "Task" [ 927.964446] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.973536] env[62627]: DEBUG oslo_vmware.api [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52e32613-ddc2-b67d-9193-bd5474bf7f3f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.037333] env[62627]: DEBUG nova.network.neutron [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] [instance: 4e65e39e-3b0b-4c71-87d7-e1ed663ada4a] Successfully updated port: 7e5db0e3-f97d-48f9-89bd-9d1b24ac6d14 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 928.053846] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a8160a8-2bd6-4406-96b6-be954b182643 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.062177] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcfdbebc-402b-4779-920d-f53e6702c6dc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.093316] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca2a760b-ccec-4764-8433-08cca27233e2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.101057] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d087a27-c59e-42ad-8ccc-6ed2a77ae718 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.116408] env[62627]: DEBUG nova.compute.provider_tree [None req-906c8ed3-1050-42a5-8029-69cfd3c9d205 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 928.244269] env[62627]: DEBUG oslo_vmware.api [None req-3ef77a08-4245-44fc-9fd4-427e567fe463 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194550, 'name': ReconfigVM_Task, 'duration_secs': 0.147701} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.244727] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ef77a08-4245-44fc-9fd4-427e567fe463 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447689', 'volume_id': 'd70a21ef-38af-445d-bae6-087172ff59dc', 'name': 'volume-d70a21ef-38af-445d-bae6-087172ff59dc', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd0786233-087b-4000-8c37-fd5c55cc0ef4', 'attached_at': '', 'detached_at': '', 'volume_id': 'd70a21ef-38af-445d-bae6-087172ff59dc', 'serial': 'd70a21ef-38af-445d-bae6-087172ff59dc'} {{(pid=62627) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 928.254426] env[62627]: DEBUG oslo_vmware.api [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Task: {'id': task-2194551, 'name': Rename_Task, 'duration_secs': 0.238849} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.254426] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] [instance: 66882603-6ff6-49c2-9f42-3521ecaf98b2] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 928.255036] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4a61664d-b501-4dba-b791-fc5ecd931087 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.263468] env[62627]: DEBUG oslo_vmware.api [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Waiting for the task: (returnval){ [ 928.263468] env[62627]: value = "task-2194552" [ 928.263468] env[62627]: _type = "Task" [ 928.263468] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.272209] env[62627]: DEBUG oslo_vmware.api [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Task: {'id': task-2194552, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.297292] env[62627]: DEBUG nova.network.neutron [-] [instance: 7804914c-7df8-45c2-a934-67aa73afb2e2] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 928.367475] env[62627]: DEBUG oslo_concurrency.lockutils [req-d223cc60-0b48-4f37-8f28-639c5428894a req-d01475f2-ae96-4e58-bd49-fd6d3a9515ab service nova] Releasing lock "refresh_cache-38ab16cc-be4f-47b4-a818-ea864e853872" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 928.382578] env[62627]: DEBUG nova.compute.manager [req-ff79aeb0-6068-4ee9-9ac0-e20791b8bcc9 req-d4edd307-2cf9-4fb6-9be8-73189e2638b1 service nova] [instance: 4e65e39e-3b0b-4c71-87d7-e1ed663ada4a] Received event network-vif-plugged-7e5db0e3-f97d-48f9-89bd-9d1b24ac6d14 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 928.382932] env[62627]: DEBUG oslo_concurrency.lockutils [req-ff79aeb0-6068-4ee9-9ac0-e20791b8bcc9 req-d4edd307-2cf9-4fb6-9be8-73189e2638b1 service nova] Acquiring lock "4e65e39e-3b0b-4c71-87d7-e1ed663ada4a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 928.383262] env[62627]: DEBUG oslo_concurrency.lockutils [req-ff79aeb0-6068-4ee9-9ac0-e20791b8bcc9 req-d4edd307-2cf9-4fb6-9be8-73189e2638b1 service nova] Lock "4e65e39e-3b0b-4c71-87d7-e1ed663ada4a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 928.383559] env[62627]: DEBUG oslo_concurrency.lockutils [req-ff79aeb0-6068-4ee9-9ac0-e20791b8bcc9 req-d4edd307-2cf9-4fb6-9be8-73189e2638b1 service nova] Lock "4e65e39e-3b0b-4c71-87d7-e1ed663ada4a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 928.383849] env[62627]: DEBUG nova.compute.manager [req-ff79aeb0-6068-4ee9-9ac0-e20791b8bcc9 req-d4edd307-2cf9-4fb6-9be8-73189e2638b1 service nova] [instance: 4e65e39e-3b0b-4c71-87d7-e1ed663ada4a] No waiting events found dispatching network-vif-plugged-7e5db0e3-f97d-48f9-89bd-9d1b24ac6d14 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 928.384142] env[62627]: WARNING nova.compute.manager [req-ff79aeb0-6068-4ee9-9ac0-e20791b8bcc9 req-d4edd307-2cf9-4fb6-9be8-73189e2638b1 service nova] [instance: 4e65e39e-3b0b-4c71-87d7-e1ed663ada4a] Received unexpected event network-vif-plugged-7e5db0e3-f97d-48f9-89bd-9d1b24ac6d14 for instance with vm_state building and task_state spawning. 
[ 928.384428] env[62627]: DEBUG nova.compute.manager [req-ff79aeb0-6068-4ee9-9ac0-e20791b8bcc9 req-d4edd307-2cf9-4fb6-9be8-73189e2638b1 service nova] [instance: 4e65e39e-3b0b-4c71-87d7-e1ed663ada4a] Received event network-changed-7e5db0e3-f97d-48f9-89bd-9d1b24ac6d14 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 928.384719] env[62627]: DEBUG nova.compute.manager [req-ff79aeb0-6068-4ee9-9ac0-e20791b8bcc9 req-d4edd307-2cf9-4fb6-9be8-73189e2638b1 service nova] [instance: 4e65e39e-3b0b-4c71-87d7-e1ed663ada4a] Refreshing instance network info cache due to event network-changed-7e5db0e3-f97d-48f9-89bd-9d1b24ac6d14. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 928.385026] env[62627]: DEBUG oslo_concurrency.lockutils [req-ff79aeb0-6068-4ee9-9ac0-e20791b8bcc9 req-d4edd307-2cf9-4fb6-9be8-73189e2638b1 service nova] Acquiring lock "refresh_cache-4e65e39e-3b0b-4c71-87d7-e1ed663ada4a" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 928.385291] env[62627]: DEBUG oslo_concurrency.lockutils [req-ff79aeb0-6068-4ee9-9ac0-e20791b8bcc9 req-d4edd307-2cf9-4fb6-9be8-73189e2638b1 service nova] Acquired lock "refresh_cache-4e65e39e-3b0b-4c71-87d7-e1ed663ada4a" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 928.385564] env[62627]: DEBUG nova.network.neutron [req-ff79aeb0-6068-4ee9-9ac0-e20791b8bcc9 req-d4edd307-2cf9-4fb6-9be8-73189e2638b1 service nova] [instance: 4e65e39e-3b0b-4c71-87d7-e1ed663ada4a] Refreshing network info cache for port 7e5db0e3-f97d-48f9-89bd-9d1b24ac6d14 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 928.388753] env[62627]: DEBUG nova.virt.hardware [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 928.389172] env[62627]: DEBUG nova.virt.hardware [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 928.389463] env[62627]: DEBUG nova.virt.hardware [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 928.389820] env[62627]: DEBUG nova.virt.hardware [None req-707b0bce-e894-4d20-a873-8f4e0993309c 
tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 928.390105] env[62627]: DEBUG nova.virt.hardware [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 928.390369] env[62627]: DEBUG nova.virt.hardware [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 928.390691] env[62627]: DEBUG nova.virt.hardware [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 928.390982] env[62627]: DEBUG nova.virt.hardware [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 928.391300] env[62627]: DEBUG nova.virt.hardware [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 928.391586] env[62627]: DEBUG nova.virt.hardware [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 928.391982] env[62627]: DEBUG nova.virt.hardware [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 928.393575] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40aaa858-7339-4b83-adc9-df4aaac33298 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.407078] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1396384a-8134-4637-b956-a3d36e44f48b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.420606] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1f:8d:65', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'b2907cce-d529-4809-af05-d29397bed211', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '81a799c8-db8c-4bae-8609-d4bf5b56c600', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 928.428832] env[62627]: DEBUG oslo.service.loopingcall [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 928.431502] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 928.431915] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6d57dc6b-767f-46dc-8886-bac20a286d3f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.452871] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 928.452871] env[62627]: value = "task-2194553" [ 928.452871] env[62627]: _type = "Task" [ 928.452871] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.460891] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194553, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.473804] env[62627]: DEBUG oslo_vmware.api [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52e32613-ddc2-b67d-9193-bd5474bf7f3f, 'name': SearchDatastore_Task, 'duration_secs': 0.009808} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.474608] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1888a161-28c7-4789-ba0d-02aef4b664a3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.480397] env[62627]: DEBUG oslo_vmware.api [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Waiting for the task: (returnval){ [ 928.480397] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]527286fb-42a1-af80-8e29-b8cd43652438" [ 928.480397] env[62627]: _type = "Task" [ 928.480397] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.488487] env[62627]: DEBUG oslo_vmware.api [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]527286fb-42a1-af80-8e29-b8cd43652438, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.539603] env[62627]: DEBUG oslo_concurrency.lockutils [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Acquiring lock "refresh_cache-4e65e39e-3b0b-4c71-87d7-e1ed663ada4a" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 928.619741] env[62627]: DEBUG nova.scheduler.client.report [None req-906c8ed3-1050-42a5-8029-69cfd3c9d205 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 928.623843] env[62627]: DEBUG nova.network.neutron [req-aff48b16-bca1-4e75-bdf2-2c6bb2b370b2 req-9da3ec19-5029-47c1-b5f9-ee5823557b67 service nova] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Updated VIF entry in instance network info cache for port 62b171c6-c991-4816-86e3-ba8a0862f911. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 928.624222] env[62627]: DEBUG nova.network.neutron [req-aff48b16-bca1-4e75-bdf2-2c6bb2b370b2 req-9da3ec19-5029-47c1-b5f9-ee5823557b67 service nova] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Updating instance_info_cache with network_info: [{"id": "62b171c6-c991-4816-86e3-ba8a0862f911", "address": "fa:16:3e:d7:1a:59", "network": {"id": "0b8ce5a6-9abc-4473-b558-a4e9e5c7d741", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1874234589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.171", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d41b3fa69e2473fbea79b0ec52108fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc9714ff-7109-4ea1-9435-b2b3fbdb9e81", "external-id": "nsx-vlan-transportzone-887", "segmentation_id": 887, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62b171c6-c9", "ovs_interfaceid": "62b171c6-c991-4816-86e3-ba8a0862f911", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 928.774761] env[62627]: DEBUG oslo_vmware.api [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Task: {'id': task-2194552, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.798183] env[62627]: INFO nova.compute.manager [-] [instance: 7804914c-7df8-45c2-a934-67aa73afb2e2] Took 1.03 seconds to deallocate network for instance. [ 928.943631] env[62627]: DEBUG nova.network.neutron [req-ff79aeb0-6068-4ee9-9ac0-e20791b8bcc9 req-d4edd307-2cf9-4fb6-9be8-73189e2638b1 service nova] [instance: 4e65e39e-3b0b-4c71-87d7-e1ed663ada4a] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 928.969011] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194553, 'name': CreateVM_Task, 'duration_secs': 0.328987} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.969194] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 928.969894] env[62627]: DEBUG oslo_concurrency.lockutils [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 928.970074] env[62627]: DEBUG oslo_concurrency.lockutils [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 928.970395] env[62627]: DEBUG oslo_concurrency.lockutils [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 928.970673] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0551b47e-0673-48a5-9203-b9c581eb2dcc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.975321] env[62627]: DEBUG oslo_vmware.api [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for the task: (returnval){ [ 928.975321] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52b66351-409e-6c06-b023-cde4a781938e" [ 928.975321] env[62627]: _type = "Task" [ 928.975321] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.983862] env[62627]: DEBUG oslo_vmware.api [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52b66351-409e-6c06-b023-cde4a781938e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.993813] env[62627]: DEBUG oslo_vmware.api [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]527286fb-42a1-af80-8e29-b8cd43652438, 'name': SearchDatastore_Task, 'duration_secs': 0.013057} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.994118] env[62627]: DEBUG oslo_concurrency.lockutils [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 928.994391] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 38ab16cc-be4f-47b4-a818-ea864e853872/38ab16cc-be4f-47b4-a818-ea864e853872.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 928.994674] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9f902394-5a05-471d-9714-88307255f419 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.001293] env[62627]: DEBUG oslo_vmware.api [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Waiting for the task: (returnval){ [ 929.001293] env[62627]: value = "task-2194554" [ 929.001293] env[62627]: _type = "Task" [ 929.001293] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.009056] env[62627]: DEBUG oslo_vmware.api [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194554, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.018351] env[62627]: DEBUG nova.network.neutron [req-ff79aeb0-6068-4ee9-9ac0-e20791b8bcc9 req-d4edd307-2cf9-4fb6-9be8-73189e2638b1 service nova] [instance: 4e65e39e-3b0b-4c71-87d7-e1ed663ada4a] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 929.126399] env[62627]: DEBUG oslo_concurrency.lockutils [None req-906c8ed3-1050-42a5-8029-69cfd3c9d205 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.156s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 929.128741] env[62627]: DEBUG oslo_concurrency.lockutils [req-aff48b16-bca1-4e75-bdf2-2c6bb2b370b2 req-9da3ec19-5029-47c1-b5f9-ee5823557b67 service nova] Releasing lock "refresh_cache-457e5aa6-d229-4f48-8817-e918ec4bd0f9" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 929.129341] env[62627]: DEBUG oslo_concurrency.lockutils [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 25.179s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 929.154997] env[62627]: INFO nova.scheduler.client.report [None req-906c8ed3-1050-42a5-8029-69cfd3c9d205 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Deleted allocations for instance 02216be1-cdb9-421a-8a23-4a919b376595 [ 929.275470] env[62627]: DEBUG oslo_vmware.api [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Task: {'id': task-2194552, 'name': PowerOnVM_Task, 'duration_secs': 0.743086} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.275857] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] [instance: 66882603-6ff6-49c2-9f42-3521ecaf98b2] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 929.276022] env[62627]: INFO nova.compute.manager [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] [instance: 66882603-6ff6-49c2-9f42-3521ecaf98b2] Took 7.91 seconds to spawn the instance on the hypervisor. 
[ 929.276143] env[62627]: DEBUG nova.compute.manager [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] [instance: 66882603-6ff6-49c2-9f42-3521ecaf98b2] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 929.276921] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90399e70-03c3-48e7-9ad3-f03636778856 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.295532] env[62627]: DEBUG nova.objects.instance [None req-3ef77a08-4245-44fc-9fd4-427e567fe463 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lazy-loading 'flavor' on Instance uuid d0786233-087b-4000-8c37-fd5c55cc0ef4 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 929.307659] env[62627]: DEBUG oslo_concurrency.lockutils [None req-896b61e2-43d3-4c8e-b0d1-2bea7e25f400 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 929.441335] env[62627]: DEBUG oslo_concurrency.lockutils [None req-048bd3cb-f9ad-4be1-bab6-addfc949e805 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquiring lock "d0786233-087b-4000-8c37-fd5c55cc0ef4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 929.487345] env[62627]: DEBUG oslo_vmware.api [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52b66351-409e-6c06-b023-cde4a781938e, 'name': SearchDatastore_Task, 'duration_secs': 0.014584} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.487674] env[62627]: DEBUG oslo_concurrency.lockutils [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 929.487912] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 929.488192] env[62627]: DEBUG oslo_concurrency.lockutils [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 929.488340] env[62627]: DEBUG oslo_concurrency.lockutils [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 929.488520] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 929.488851] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0555aa6a-83ee-431d-93ee-30073be0855a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.499234] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 929.499439] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 929.500226] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8c5acc23-8701-4d1c-8a32-3f7f6fea8be4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.509759] env[62627]: DEBUG oslo_vmware.api [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for the task: (returnval){ [ 929.509759] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d837a3-f7b2-2dac-2dcb-827d6fbf0995" [ 929.509759] env[62627]: _type = "Task" [ 929.509759] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.513497] env[62627]: DEBUG oslo_vmware.api [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194554, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.522218] env[62627]: DEBUG oslo_concurrency.lockutils [req-ff79aeb0-6068-4ee9-9ac0-e20791b8bcc9 req-d4edd307-2cf9-4fb6-9be8-73189e2638b1 service nova] Releasing lock "refresh_cache-4e65e39e-3b0b-4c71-87d7-e1ed663ada4a" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 929.522431] env[62627]: DEBUG oslo_vmware.api [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d837a3-f7b2-2dac-2dcb-827d6fbf0995, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.522754] env[62627]: DEBUG oslo_concurrency.lockutils [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Acquired lock "refresh_cache-4e65e39e-3b0b-4c71-87d7-e1ed663ada4a" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 929.522912] env[62627]: DEBUG nova.network.neutron [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] [instance: 4e65e39e-3b0b-4c71-87d7-e1ed663ada4a] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 929.635961] env[62627]: INFO nova.compute.claims [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 929.662850] env[62627]: DEBUG oslo_concurrency.lockutils [None req-906c8ed3-1050-42a5-8029-69cfd3c9d205 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Lock "02216be1-cdb9-421a-8a23-4a919b376595" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.401s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 929.803960] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3ef77a08-4245-44fc-9fd4-427e567fe463 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "d0786233-087b-4000-8c37-fd5c55cc0ef4" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.826s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 929.803960] env[62627]: DEBUG oslo_concurrency.lockutils [None req-048bd3cb-f9ad-4be1-bab6-addfc949e805 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "d0786233-087b-4000-8c37-fd5c55cc0ef4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.362s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 929.803960] env[62627]: DEBUG oslo_concurrency.lockutils [None req-048bd3cb-f9ad-4be1-bab6-addfc949e805 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquiring lock "d0786233-087b-4000-8c37-fd5c55cc0ef4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 929.803960] env[62627]: DEBUG oslo_concurrency.lockutils [None req-048bd3cb-f9ad-4be1-bab6-addfc949e805 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "d0786233-087b-4000-8c37-fd5c55cc0ef4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 929.803960] env[62627]: DEBUG oslo_concurrency.lockutils [None 
req-048bd3cb-f9ad-4be1-bab6-addfc949e805 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "d0786233-087b-4000-8c37-fd5c55cc0ef4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 929.805781] env[62627]: INFO nova.compute.manager [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] [instance: 66882603-6ff6-49c2-9f42-3521ecaf98b2] Took 33.32 seconds to build instance. [ 929.807261] env[62627]: INFO nova.compute.manager [None req-048bd3cb-f9ad-4be1-bab6-addfc949e805 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] Terminating instance [ 929.816946] env[62627]: DEBUG oslo_concurrency.lockutils [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Acquiring lock "0de040e4-8896-4f02-9284-f68100254631" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 929.817624] env[62627]: DEBUG oslo_concurrency.lockutils [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Lock "0de040e4-8896-4f02-9284-f68100254631" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 930.013032] env[62627]: DEBUG oslo_vmware.api [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194554, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.808383} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.013383] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 38ab16cc-be4f-47b4-a818-ea864e853872/38ab16cc-be4f-47b4-a818-ea864e853872.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 930.013383] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 930.013633] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2b1a9442-327a-4add-9d2b-419f6217dfd4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.025406] env[62627]: DEBUG oslo_vmware.api [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d837a3-f7b2-2dac-2dcb-827d6fbf0995, 'name': SearchDatastore_Task, 'duration_secs': 0.020042} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.029924] env[62627]: DEBUG oslo_vmware.api [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Waiting for the task: (returnval){ [ 930.029924] env[62627]: value = "task-2194555" [ 930.029924] env[62627]: _type = "Task" [ 930.029924] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.029924] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9e0c645b-cab9-43e1-9306-2c5b641626ce {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.040513] env[62627]: DEBUG oslo_vmware.api [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for the task: (returnval){ [ 930.040513] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52dd06b1-98d9-ef4a-2931-3d5c03808f09" [ 930.040513] env[62627]: _type = "Task" [ 930.040513] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.043690] env[62627]: DEBUG oslo_vmware.api [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194555, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.052609] env[62627]: DEBUG oslo_vmware.api [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52dd06b1-98d9-ef4a-2931-3d5c03808f09, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.061816] env[62627]: DEBUG nova.network.neutron [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] [instance: 4e65e39e-3b0b-4c71-87d7-e1ed663ada4a] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 930.144750] env[62627]: INFO nova.compute.resource_tracker [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Updating resource usage from migration f454579e-0768-41bf-8774-f65a3637cfab [ 930.254895] env[62627]: DEBUG nova.network.neutron [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] [instance: 4e65e39e-3b0b-4c71-87d7-e1ed663ada4a] Updating instance_info_cache with network_info: [{"id": "7e5db0e3-f97d-48f9-89bd-9d1b24ac6d14", "address": "fa:16:3e:fa:ee:07", "network": {"id": "bd876f46-2bdf-4196-b718-c36f7e15f150", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.157", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ccb0cafa1a904d578ec4998b386bebf7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae1d6a8-cbba-4bbf-af10-ba5467340475", "external-id": "nsx-vlan-transportzone-271", "segmentation_id": 271, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e5db0e3-f9", "ovs_interfaceid": "7e5db0e3-f97d-48f9-89bd-9d1b24ac6d14", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 930.308769] env[62627]: DEBUG oslo_concurrency.lockutils [None req-937c4826-e746-4a65-8c0b-a24aaf6ef7ed tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Lock "66882603-6ff6-49c2-9f42-3521ecaf98b2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 43.441s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 930.318306] env[62627]: DEBUG nova.compute.manager [None req-048bd3cb-f9ad-4be1-bab6-addfc949e805 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] Start destroying the instance on the hypervisor. 
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 930.318717] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-048bd3cb-f9ad-4be1-bab6-addfc949e805 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 930.323136] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0778a0de-2fa7-4135-8fb4-87a501506d1a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.329761] env[62627]: DEBUG oslo_vmware.api [None req-048bd3cb-f9ad-4be1-bab6-addfc949e805 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 930.329761] env[62627]: value = "task-2194556" [ 930.329761] env[62627]: _type = "Task" [ 930.329761] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.346078] env[62627]: DEBUG oslo_vmware.api [None req-048bd3cb-f9ad-4be1-bab6-addfc949e805 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194556, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.556517] env[62627]: DEBUG oslo_vmware.api [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194555, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066703} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.556517] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 930.556517] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fd8a713-fe74-493d-99b9-de093b5fa8e3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.567027] env[62627]: DEBUG oslo_vmware.api [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52dd06b1-98d9-ef4a-2931-3d5c03808f09, 'name': SearchDatastore_Task, 'duration_secs': 0.01495} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.567027] env[62627]: DEBUG oslo_concurrency.lockutils [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 930.567027] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore1] 7366e4b0-bdba-435d-844d-8e81c4464b31/7366e4b0-bdba-435d-844d-8e81c4464b31.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 930.567027] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-84222e96-611f-43fa-a952-7afeef4e716a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.595258] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Reconfiguring VM instance instance-00000035 to attach disk [datastore2] 38ab16cc-be4f-47b4-a818-ea864e853872/38ab16cc-be4f-47b4-a818-ea864e853872.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 930.599317] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ec3a5361-4af1-4620-bce4-8e872f79e3c7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.617241] env[62627]: DEBUG oslo_vmware.api [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for the task: (returnval){ [ 930.617241] env[62627]: value = "task-2194557" [ 930.617241] env[62627]: _type = "Task" [ 930.617241] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.624998] env[62627]: DEBUG oslo_vmware.api [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Waiting for the task: (returnval){ [ 930.624998] env[62627]: value = "task-2194558" [ 930.624998] env[62627]: _type = "Task" [ 930.624998] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.633618] env[62627]: DEBUG oslo_vmware.api [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194557, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.641064] env[62627]: DEBUG oslo_vmware.api [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194558, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.760589] env[62627]: DEBUG oslo_concurrency.lockutils [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Releasing lock "refresh_cache-4e65e39e-3b0b-4c71-87d7-e1ed663ada4a" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 930.760901] env[62627]: DEBUG nova.compute.manager [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] [instance: 4e65e39e-3b0b-4c71-87d7-e1ed663ada4a] Instance network_info: |[{"id": "7e5db0e3-f97d-48f9-89bd-9d1b24ac6d14", "address": "fa:16:3e:fa:ee:07", "network": {"id": "bd876f46-2bdf-4196-b718-c36f7e15f150", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.157", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ccb0cafa1a904d578ec4998b386bebf7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae1d6a8-cbba-4bbf-af10-ba5467340475", "external-id": "nsx-vlan-transportzone-271", "segmentation_id": 271, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e5db0e3-f9", "ovs_interfaceid": "7e5db0e3-f97d-48f9-89bd-9d1b24ac6d14", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 930.761731] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62d74a5a-6d47-4371-93da-7d89bf58e589 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.764696] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] [instance: 4e65e39e-3b0b-4c71-87d7-e1ed663ada4a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fa:ee:07', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cae1d6a8-cbba-4bbf-af10-ba5467340475', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7e5db0e3-f97d-48f9-89bd-9d1b24ac6d14', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 930.772995] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Creating folder: Project (ddc2513b33ec44b995cbd09fdf957495). Parent ref: group-v447541. 
{{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 930.773384] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a36f2dbe-d21f-40a6-8d96-075f29a13b1b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.780174] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0248d45-d69e-4bbe-bfe2-217c39b04efc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.784911] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Created folder: Project (ddc2513b33ec44b995cbd09fdf957495) in parent group-v447541. [ 930.785147] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Creating folder: Instances. Parent ref: group-v447695. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 930.785871] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e48d283b-4dc2-48eb-b659-d6c524899909 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.814344] env[62627]: DEBUG nova.compute.manager [req-91b62b92-f86a-4320-a189-4a70560348b6 req-3a5e21df-7289-46d4-9224-698870623ae0 service nova] [instance: 66882603-6ff6-49c2-9f42-3521ecaf98b2] Received event network-changed-3dc09d47-bf2c-4769-96df-8bc30f2bb658 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 930.814796] env[62627]: DEBUG nova.compute.manager [req-91b62b92-f86a-4320-a189-4a70560348b6 req-3a5e21df-7289-46d4-9224-698870623ae0 service nova] [instance: 66882603-6ff6-49c2-9f42-3521ecaf98b2] Refreshing instance network info cache due to event network-changed-3dc09d47-bf2c-4769-96df-8bc30f2bb658. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 930.814848] env[62627]: DEBUG oslo_concurrency.lockutils [req-91b62b92-f86a-4320-a189-4a70560348b6 req-3a5e21df-7289-46d4-9224-698870623ae0 service nova] Acquiring lock "refresh_cache-66882603-6ff6-49c2-9f42-3521ecaf98b2" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 930.814974] env[62627]: DEBUG oslo_concurrency.lockutils [req-91b62b92-f86a-4320-a189-4a70560348b6 req-3a5e21df-7289-46d4-9224-698870623ae0 service nova] Acquired lock "refresh_cache-66882603-6ff6-49c2-9f42-3521ecaf98b2" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 930.815152] env[62627]: DEBUG nova.network.neutron [req-91b62b92-f86a-4320-a189-4a70560348b6 req-3a5e21df-7289-46d4-9224-698870623ae0 service nova] [instance: 66882603-6ff6-49c2-9f42-3521ecaf98b2] Refreshing network info cache for port 3dc09d47-bf2c-4769-96df-8bc30f2bb658 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 930.817197] env[62627]: DEBUG nova.compute.manager [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] Starting instance... 
{{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 930.823496] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67d2e4ae-d39d-44b5-8169-e7299b4b22f1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.828573] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Created folder: Instances in parent group-v447695. [ 930.828788] env[62627]: DEBUG oslo.service.loopingcall [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 930.830865] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4e65e39e-3b0b-4c71-87d7-e1ed663ada4a] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 930.831432] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-be74d391-672a-4948-a799-bd3e9876f3ce {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.849832] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1976020a-75dc-4905-b11b-7d7b8f992877 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.859215] env[62627]: DEBUG oslo_vmware.api [None req-048bd3cb-f9ad-4be1-bab6-addfc949e805 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194556, 'name': PowerOffVM_Task, 'duration_secs': 0.213632} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.868398] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-048bd3cb-f9ad-4be1-bab6-addfc949e805 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 930.868613] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-048bd3cb-f9ad-4be1-bab6-addfc949e805 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] Volume detach. 
Driver type: vmdk {{(pid=62627) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 930.868931] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-048bd3cb-f9ad-4be1-bab6-addfc949e805 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447689', 'volume_id': 'd70a21ef-38af-445d-bae6-087172ff59dc', 'name': 'volume-d70a21ef-38af-445d-bae6-087172ff59dc', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd0786233-087b-4000-8c37-fd5c55cc0ef4', 'attached_at': '', 'detached_at': '', 'volume_id': 'd70a21ef-38af-445d-bae6-087172ff59dc', 'serial': 'd70a21ef-38af-445d-bae6-087172ff59dc'} {{(pid=62627) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 930.869557] env[62627]: DEBUG nova.compute.provider_tree [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 930.874020] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 930.874020] env[62627]: value = "task-2194561" [ 930.874020] env[62627]: _type = "Task" [ 930.874020] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.874020] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc67ca91-6cae-4acb-861f-907ddb87872a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.884994] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194561, 'name': CreateVM_Task} progress is 10%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.906186] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7821ed7-5d4e-4298-932b-ce0818efb480 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.915072] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a1cd4b9-fc36-4559-9094-421c63995487 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.939505] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa264510-a9f7-4b4b-8d5b-02106a80c1f7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.955806] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-048bd3cb-f9ad-4be1-bab6-addfc949e805 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] The volume has not been displaced from its original location: [datastore2] volume-d70a21ef-38af-445d-bae6-087172ff59dc/volume-d70a21ef-38af-445d-bae6-087172ff59dc.vmdk. No consolidation needed. 
{{(pid=62627) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 930.961405] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-048bd3cb-f9ad-4be1-bab6-addfc949e805 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] Reconfiguring VM instance instance-00000032 to detach disk 2001 {{(pid=62627) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 930.962214] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a4dfd427-7a22-4ba0-b868-a1ea17c87dff {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.981539] env[62627]: DEBUG oslo_vmware.api [None req-048bd3cb-f9ad-4be1-bab6-addfc949e805 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 930.981539] env[62627]: value = "task-2194562" [ 930.981539] env[62627]: _type = "Task" [ 930.981539] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.989352] env[62627]: DEBUG oslo_vmware.api [None req-048bd3cb-f9ad-4be1-bab6-addfc949e805 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194562, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.128497] env[62627]: DEBUG oslo_vmware.api [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194557, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.138389] env[62627]: DEBUG oslo_vmware.api [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194558, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.342427] env[62627]: DEBUG oslo_concurrency.lockutils [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 931.372136] env[62627]: DEBUG nova.scheduler.client.report [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 931.385222] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194561, 'name': CreateVM_Task} progress is 25%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.490960] env[62627]: DEBUG oslo_vmware.api [None req-048bd3cb-f9ad-4be1-bab6-addfc949e805 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194562, 'name': ReconfigVM_Task, 'duration_secs': 0.201063} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.491321] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-048bd3cb-f9ad-4be1-bab6-addfc949e805 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] Reconfigured VM instance instance-00000032 to detach disk 2001 {{(pid=62627) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 931.496180] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-da02d9c5-9d80-4b6e-a6e5-4d6a382f7a6f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.512597] env[62627]: DEBUG oslo_vmware.api [None req-048bd3cb-f9ad-4be1-bab6-addfc949e805 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 931.512597] env[62627]: value = "task-2194563" [ 931.512597] env[62627]: _type = "Task" [ 931.512597] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.522716] env[62627]: DEBUG oslo_vmware.api [None req-048bd3cb-f9ad-4be1-bab6-addfc949e805 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194563, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.628271] env[62627]: DEBUG oslo_vmware.api [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194557, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.766637} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.628271] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore1] 7366e4b0-bdba-435d-844d-8e81c4464b31/7366e4b0-bdba-435d-844d-8e81c4464b31.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 931.628271] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 931.628427] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-934e3db2-66cc-4ab9-853c-e8ea02a76d1b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.636773] env[62627]: DEBUG nova.network.neutron [req-91b62b92-f86a-4320-a189-4a70560348b6 req-3a5e21df-7289-46d4-9224-698870623ae0 service nova] [instance: 66882603-6ff6-49c2-9f42-3521ecaf98b2] Updated VIF entry in instance network info cache for port 3dc09d47-bf2c-4769-96df-8bc30f2bb658. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 931.636773] env[62627]: DEBUG nova.network.neutron [req-91b62b92-f86a-4320-a189-4a70560348b6 req-3a5e21df-7289-46d4-9224-698870623ae0 service nova] [instance: 66882603-6ff6-49c2-9f42-3521ecaf98b2] Updating instance_info_cache with network_info: [{"id": "3dc09d47-bf2c-4769-96df-8bc30f2bb658", "address": "fa:16:3e:56:e9:8b", "network": {"id": "45ff759a-f1c7-4241-a5ca-571f2d5ba6b7", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1755040165-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.219", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "af9e3552cc0543bf8dc5562e645493fc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69054a13-b7ef-44e1-bd3b-3ca5ba602848", "external-id": "nsx-vlan-transportzone-153", "segmentation_id": 153, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3dc09d47-bf", "ovs_interfaceid": "3dc09d47-bf2c-4769-96df-8bc30f2bb658", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 931.643114] env[62627]: DEBUG oslo_vmware.api [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194558, 'name': ReconfigVM_Task, 'duration_secs': 0.652438} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.644468] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Reconfigured VM instance instance-00000035 to attach disk [datastore2] 38ab16cc-be4f-47b4-a818-ea864e853872/38ab16cc-be4f-47b4-a818-ea864e853872.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 931.645477] env[62627]: DEBUG oslo_vmware.api [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for the task: (returnval){ [ 931.645477] env[62627]: value = "task-2194564" [ 931.645477] env[62627]: _type = "Task" [ 931.645477] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.645477] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-12f2c5b4-aa6d-411f-9b63-9dd1573d67b9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.657757] env[62627]: DEBUG oslo_vmware.api [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194564, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.659412] env[62627]: DEBUG oslo_vmware.api [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Waiting for the task: (returnval){ [ 931.659412] env[62627]: value = "task-2194565" [ 931.659412] env[62627]: _type = "Task" [ 931.659412] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.669030] env[62627]: DEBUG oslo_vmware.api [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194565, 'name': Rename_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.881229] env[62627]: DEBUG oslo_concurrency.lockutils [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.752s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 931.881769] env[62627]: INFO nova.compute.manager [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Migrating [ 931.887829] env[62627]: DEBUG oslo_concurrency.lockutils [None req-dd49e997-61fd-48e6-afd0-481021c08df8 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.302s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 931.888069] env[62627]: DEBUG nova.objects.instance [None req-dd49e997-61fd-48e6-afd0-481021c08df8 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Lazy-loading 'resources' on Instance uuid f1b70379-e13d-4007-ac2d-3d2d43cd5602 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 931.900447] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194561, 'name': CreateVM_Task, 'duration_secs': 0.690357} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.901054] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4e65e39e-3b0b-4c71-87d7-e1ed663ada4a] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 931.901698] env[62627]: DEBUG oslo_concurrency.lockutils [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 931.901857] env[62627]: DEBUG oslo_concurrency.lockutils [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 931.902193] env[62627]: DEBUG oslo_concurrency.lockutils [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 931.902665] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe4f9ec5-af64-4d04-8592-69d9a0a29847 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.907666] env[62627]: DEBUG oslo_vmware.api [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Waiting for the task: (returnval){ [ 931.907666] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]523beb0a-3990-0eea-b002-10c28fc99206" [ 931.907666] env[62627]: _type = "Task" [ 931.907666] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.916487] env[62627]: DEBUG oslo_vmware.api [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]523beb0a-3990-0eea-b002-10c28fc99206, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.924372] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 931.924792] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 932.021524] env[62627]: DEBUG oslo_vmware.api [None req-048bd3cb-f9ad-4be1-bab6-addfc949e805 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194563, 'name': ReconfigVM_Task, 'duration_secs': 0.205544} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.022168] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-048bd3cb-f9ad-4be1-bab6-addfc949e805 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447689', 'volume_id': 'd70a21ef-38af-445d-bae6-087172ff59dc', 'name': 'volume-d70a21ef-38af-445d-bae6-087172ff59dc', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd0786233-087b-4000-8c37-fd5c55cc0ef4', 'attached_at': '', 'detached_at': '', 'volume_id': 'd70a21ef-38af-445d-bae6-087172ff59dc', 'serial': 'd70a21ef-38af-445d-bae6-087172ff59dc'} {{(pid=62627) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 932.022667] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-048bd3cb-f9ad-4be1-bab6-addfc949e805 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 932.023263] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0daa2b2-3e8e-4146-9aef-eb96f2386168 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.029414] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-048bd3cb-f9ad-4be1-bab6-addfc949e805 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 932.029637] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7369fdaa-62e5-4a0b-9549-e4dd3248b600 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.087414] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-048bd3cb-f9ad-4be1-bab6-addfc949e805 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] Unregistered the VM {{(pid=62627) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 932.087617] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-048bd3cb-f9ad-4be1-bab6-addfc949e805 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 932.087797] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-048bd3cb-f9ad-4be1-bab6-addfc949e805 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Deleting the datastore file [datastore2] d0786233-087b-4000-8c37-fd5c55cc0ef4 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 932.088066] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4c7728d0-aa0d-408d-8e40-a2cd6173cf5c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.094940] env[62627]: DEBUG oslo_vmware.api [None req-048bd3cb-f9ad-4be1-bab6-addfc949e805 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 932.094940] env[62627]: value = "task-2194567" [ 932.094940] env[62627]: _type = "Task" [ 932.094940] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.102207] env[62627]: DEBUG oslo_vmware.api [None req-048bd3cb-f9ad-4be1-bab6-addfc949e805 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194567, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.140496] env[62627]: DEBUG oslo_concurrency.lockutils [req-91b62b92-f86a-4320-a189-4a70560348b6 req-3a5e21df-7289-46d4-9224-698870623ae0 service nova] Releasing lock "refresh_cache-66882603-6ff6-49c2-9f42-3521ecaf98b2" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 932.158121] env[62627]: DEBUG oslo_vmware.api [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194564, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075862} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.158714] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 932.159869] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4ae40c1-e5e8-470e-b937-63dcea2811b4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.193843] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Reconfiguring VM instance instance-00000007 to attach disk [datastore1] 7366e4b0-bdba-435d-844d-8e81c4464b31/7366e4b0-bdba-435d-844d-8e81c4464b31.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 932.196395] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0bc16f5b-8959-427e-a6be-7950207e13bf {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.210664] env[62627]: DEBUG oslo_vmware.api [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194565, 'name': Rename_Task, 'duration_secs': 0.203546} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.211779] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 932.212276] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6fa7aaaf-f2de-41bf-b599-a4c4b6363a97 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.217622] env[62627]: DEBUG oslo_vmware.api [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for the task: (returnval){ [ 932.217622] env[62627]: value = "task-2194568" [ 932.217622] env[62627]: _type = "Task" [ 932.217622] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.221883] env[62627]: DEBUG oslo_vmware.api [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Waiting for the task: (returnval){ [ 932.221883] env[62627]: value = "task-2194569" [ 932.221883] env[62627]: _type = "Task" [ 932.221883] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.227988] env[62627]: DEBUG oslo_vmware.api [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194568, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.232395] env[62627]: DEBUG oslo_vmware.api [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194569, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.401617] env[62627]: DEBUG oslo_concurrency.lockutils [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Acquiring lock "refresh_cache-892fc84b-02a9-4fa0-81b5-80326a060c64" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 932.402099] env[62627]: DEBUG oslo_concurrency.lockutils [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Acquired lock "refresh_cache-892fc84b-02a9-4fa0-81b5-80326a060c64" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 932.402171] env[62627]: DEBUG nova.network.neutron [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 932.420286] env[62627]: DEBUG oslo_vmware.api [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]523beb0a-3990-0eea-b002-10c28fc99206, 'name': SearchDatastore_Task, 'duration_secs': 0.011134} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.423932] env[62627]: DEBUG oslo_concurrency.lockutils [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 932.423932] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] [instance: 4e65e39e-3b0b-4c71-87d7-e1ed663ada4a] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 932.424078] env[62627]: DEBUG oslo_concurrency.lockutils [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 932.424413] env[62627]: DEBUG oslo_concurrency.lockutils [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 932.424413] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 932.425312] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a0aecee7-3397-4a42-b351-155a549416f6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.433640] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 932.433640] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Starting heal instance info cache {{(pid=62627) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 932.444036] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 932.444134] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 932.445028] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-49998a58-728f-46f9-a02d-cc0801624f1f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.452343] env[62627]: DEBUG oslo_vmware.api [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Waiting for the task: (returnval){ [ 932.452343] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5296f32f-c8b8-808b-bbb6-a1a51197ed8f" [ 932.452343] env[62627]: _type = "Task" [ 932.452343] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.462483] env[62627]: DEBUG oslo_vmware.api [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5296f32f-c8b8-808b-bbb6-a1a51197ed8f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.605521] env[62627]: DEBUG oslo_vmware.api [None req-048bd3cb-f9ad-4be1-bab6-addfc949e805 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194567, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.731598] env[62627]: DEBUG oslo_vmware.api [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194568, 'name': ReconfigVM_Task, 'duration_secs': 0.304265} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.731906] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Reconfigured VM instance instance-00000007 to attach disk [datastore1] 7366e4b0-bdba-435d-844d-8e81c4464b31/7366e4b0-bdba-435d-844d-8e81c4464b31.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 932.732902] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-caea82b6-4f10-44fc-90ac-ef5d62af9774 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.737401] env[62627]: DEBUG oslo_vmware.api [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194569, 'name': PowerOnVM_Task} progress is 66%. 
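[editor's aside] The image-cache records above work with paths of the form "[datastore1] devstack-image-cache_base/<image-id>/<image-id>.vmdk". As a hedged aside, oslo.vmware ships a small helper for composing and parsing such datastore paths; the sketch below only demonstrates the path format seen in the log, with the image id copied from it, and assumes the DatastorePath helper behaves as described.

# Illustrative sketch only: composing/parsing the "[datastore] folder/file.vmdk"
# paths that appear in the image-cache records above.
from oslo_vmware.objects import datastore as ds_obj

image_id = 'd5497cf1-16e8-478a-a834-e9289b1751b7'  # image id taken from the log

cached_vmdk = ds_obj.DatastorePath(
    'datastore1', 'devstack-image-cache_base', image_id, '%s.vmdk' % image_id)
print(cached_vmdk)  # -> [datastore1] devstack-image-cache_base/<id>/<id>.vmdk

# The same helper parses a path string back into datastore name + relative path.
parsed = ds_obj.DatastorePath.parse(str(cached_vmdk))
print(parsed.datastore, parsed.rel_path)

[end of aside]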
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.746294] env[62627]: DEBUG oslo_vmware.api [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for the task: (returnval){ [ 932.746294] env[62627]: value = "task-2194570" [ 932.746294] env[62627]: _type = "Task" [ 932.746294] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.756425] env[62627]: DEBUG oslo_vmware.api [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194570, 'name': Rename_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.792758] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9976c6b4-7878-4c94-b3d3-9baef0bc4ec2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.800138] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b138d60d-8767-4a9b-a8f7-ded57d1ed20c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.832524] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db865004-af53-4972-81ca-2b3537be3a51 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.840432] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdfca859-fd5a-4937-81f9-ffcb5d2db11b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.854648] env[62627]: DEBUG nova.compute.provider_tree [None req-dd49e997-61fd-48e6-afd0-481021c08df8 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 932.962657] env[62627]: DEBUG oslo_vmware.api [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5296f32f-c8b8-808b-bbb6-a1a51197ed8f, 'name': SearchDatastore_Task, 'duration_secs': 0.015435} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.964228] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0e848977-e75d-4112-b982-b47135ca2c13 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.969947] env[62627]: DEBUG oslo_vmware.api [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Waiting for the task: (returnval){ [ 932.969947] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52082efb-94bd-75f4-5ad6-c2c96ae730da" [ 932.969947] env[62627]: _type = "Task" [ 932.969947] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.978279] env[62627]: DEBUG oslo_vmware.api [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52082efb-94bd-75f4-5ad6-c2c96ae730da, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.107109] env[62627]: DEBUG oslo_vmware.api [None req-048bd3cb-f9ad-4be1-bab6-addfc949e805 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194567, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.711813} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.109424] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-048bd3cb-f9ad-4be1-bab6-addfc949e805 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 933.109615] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-048bd3cb-f9ad-4be1-bab6-addfc949e805 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 933.109813] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-048bd3cb-f9ad-4be1-bab6-addfc949e805 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 933.110017] env[62627]: INFO nova.compute.manager [None req-048bd3cb-f9ad-4be1-bab6-addfc949e805 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] Took 2.79 seconds to destroy the instance on the hypervisor. [ 933.110300] env[62627]: DEBUG oslo.service.loopingcall [None req-048bd3cb-f9ad-4be1-bab6-addfc949e805 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 933.110728] env[62627]: DEBUG nova.compute.manager [-] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 933.110728] env[62627]: DEBUG nova.network.neutron [-] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 933.151821] env[62627]: DEBUG nova.network.neutron [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Updating instance_info_cache with network_info: [{"id": "4a5f7c0b-f029-481c-a96f-0d9789d6bad5", "address": "fa:16:3e:6b:ef:0c", "network": {"id": "bd876f46-2bdf-4196-b718-c36f7e15f150", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.227", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ccb0cafa1a904d578ec4998b386bebf7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae1d6a8-cbba-4bbf-af10-ba5467340475", "external-id": "nsx-vlan-transportzone-271", "segmentation_id": 271, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4a5f7c0b-f0", "ovs_interfaceid": "4a5f7c0b-f029-481c-a96f-0d9789d6bad5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 933.236407] env[62627]: DEBUG oslo_vmware.api [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194569, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.256252] env[62627]: DEBUG oslo_vmware.api [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194570, 'name': Rename_Task, 'duration_secs': 0.150601} completed successfully. 
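[editor's aside] The deallocation records above hand the work to a looping call and wait for it to return. Purely as a sketch of that general oslo.service pattern, and not Nova's actual deallocation code, the snippet below retries a hypothetical operation on a fixed interval until it signals completion via LoopingCallDone.

# Illustrative sketch only: the oslo.service looping-call pattern used when a
# function is retried until it reports that it is done.
from oslo_service import loopingcall

attempts = {'count': 0}

def _deallocate_with_retries():
    # Hypothetical retried operation; a real caller would talk to Neutron here.
    attempts['count'] += 1
    if attempts['count'] >= 3:
        # Stop the loop and hand a return value back to wait().
        raise loopingcall.LoopingCallDone(retvalue='deallocated')

timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
result = timer.start(interval=0.1).wait()   # blocks until LoopingCallDone
print(result, 'after', attempts['count'], 'attempts')

[end of aside]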
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.256519] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 933.256781] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c87c5ba0-d2fc-4e3b-8330-c26868750d8d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.263666] env[62627]: DEBUG oslo_vmware.api [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for the task: (returnval){ [ 933.263666] env[62627]: value = "task-2194571" [ 933.263666] env[62627]: _type = "Task" [ 933.263666] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.271711] env[62627]: DEBUG oslo_vmware.api [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194571, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.358431] env[62627]: DEBUG nova.scheduler.client.report [None req-dd49e997-61fd-48e6-afd0-481021c08df8 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 933.399861] env[62627]: DEBUG nova.compute.manager [req-77a21dc3-e267-4ee8-baa2-fb7b23f9e7c9 req-093c9964-7907-4204-ba5c-0bb93d29d0f4 service nova] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] Received event network-vif-deleted-8163fefd-89a3-4ff5-bbc9-e1561f91bc68 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 933.400481] env[62627]: INFO nova.compute.manager [req-77a21dc3-e267-4ee8-baa2-fb7b23f9e7c9 req-093c9964-7907-4204-ba5c-0bb93d29d0f4 service nova] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] Neutron deleted interface 8163fefd-89a3-4ff5-bbc9-e1561f91bc68; detaching it from the instance and deleting it from the info cache [ 933.400681] env[62627]: DEBUG nova.network.neutron [req-77a21dc3-e267-4ee8-baa2-fb7b23f9e7c9 req-093c9964-7907-4204-ba5c-0bb93d29d0f4 service nova] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 933.471065] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Acquiring lock "refresh_cache-ab999825-4310-4fd8-81f0-6bb5dde4175b" {{(pid=62627) lock 
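[editor's aside] The resource-tracker line above reports the provider inventory compared against placement. To make the numbers concrete, here is a small self-contained sketch that applies placement's capacity formula, (total - reserved) * allocation_ratio, to that exact payload; it is shown for illustration only, not as Nova or placement code.

# Illustrative sketch only: effective schedulable capacity implied by the
# inventory payload logged above, via (total - reserved) * allocation_ratio.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0,
                  'min_unit': 1, 'max_unit': 16,    'step_size': 1},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0,
                  'min_unit': 1, 'max_unit': 65530, 'step_size': 1},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0,
                  'min_unit': 1, 'max_unit': 176,   'step_size': 1},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: schedulable capacity {capacity:g} "
          f"(at most {inv['max_unit']} per allocation)")

[end of aside]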
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 933.471227] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Acquired lock "refresh_cache-ab999825-4310-4fd8-81f0-6bb5dde4175b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 933.471376] env[62627]: DEBUG nova.network.neutron [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Forcefully refreshing network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 933.482437] env[62627]: DEBUG oslo_vmware.api [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52082efb-94bd-75f4-5ad6-c2c96ae730da, 'name': SearchDatastore_Task, 'duration_secs': 0.011436} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.482714] env[62627]: DEBUG oslo_concurrency.lockutils [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 933.482962] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore1] 4e65e39e-3b0b-4c71-87d7-e1ed663ada4a/4e65e39e-3b0b-4c71-87d7-e1ed663ada4a.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 933.483226] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c55d7c20-b762-461c-8a14-67f7b555b927 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.489883] env[62627]: DEBUG oslo_vmware.api [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Waiting for the task: (returnval){ [ 933.489883] env[62627]: value = "task-2194572" [ 933.489883] env[62627]: _type = "Task" [ 933.489883] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.498797] env[62627]: DEBUG oslo_vmware.api [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Task: {'id': task-2194572, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.653060] env[62627]: DEBUG oslo_concurrency.lockutils [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Releasing lock "refresh_cache-892fc84b-02a9-4fa0-81b5-80326a060c64" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 933.737397] env[62627]: DEBUG oslo_vmware.api [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194569, 'name': PowerOnVM_Task, 'duration_secs': 1.200837} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.737816] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 933.738149] env[62627]: INFO nova.compute.manager [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Took 9.64 seconds to spawn the instance on the hypervisor. [ 933.738476] env[62627]: DEBUG nova.compute.manager [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 933.739452] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e407fff9-2019-44f9-a5f7-1169e9383043 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.774248] env[62627]: DEBUG oslo_vmware.api [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194571, 'name': PowerOnVM_Task, 'duration_secs': 0.452844} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.774589] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 933.774809] env[62627]: DEBUG nova.compute.manager [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 933.775660] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-596dc74f-40b4-416c-a126-fba1ada55a1c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.866294] env[62627]: DEBUG oslo_concurrency.lockutils [None req-dd49e997-61fd-48e6-afd0-481021c08df8 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.978s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 933.868740] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.225s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 933.870443] env[62627]: INFO nova.compute.claims [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] [instance: c560b907-500b-42fd-851b-61a04472b660] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 933.883331] env[62627]: DEBUG nova.network.neutron [-] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 933.899931] env[62627]: INFO nova.scheduler.client.report [None req-dd49e997-61fd-48e6-afd0-481021c08df8 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Deleted allocations for instance f1b70379-e13d-4007-ac2d-3d2d43cd5602 [ 933.903025] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-70bfca20-3300-4f1e-9518-02f1fca50446 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.917710] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-501876d1-c944-456a-b389-8207ce88a3fb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.957426] env[62627]: DEBUG nova.compute.manager [req-77a21dc3-e267-4ee8-baa2-fb7b23f9e7c9 req-093c9964-7907-4204-ba5c-0bb93d29d0f4 service nova] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] Detach interface failed, 
port_id=8163fefd-89a3-4ff5-bbc9-e1561f91bc68, reason: Instance d0786233-087b-4000-8c37-fd5c55cc0ef4 could not be found. {{(pid=62627) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 934.000876] env[62627]: DEBUG oslo_vmware.api [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Task: {'id': task-2194572, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.258231] env[62627]: INFO nova.compute.manager [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Took 34.09 seconds to build instance. [ 934.291045] env[62627]: DEBUG oslo_concurrency.lockutils [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 934.386905] env[62627]: INFO nova.compute.manager [-] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] Took 1.28 seconds to deallocate network for instance. [ 934.416733] env[62627]: DEBUG oslo_concurrency.lockutils [None req-dd49e997-61fd-48e6-afd0-481021c08df8 tempest-MultipleCreateTestJSON-1897705394 tempest-MultipleCreateTestJSON-1897705394-project-member] Lock "f1b70379-e13d-4007-ac2d-3d2d43cd5602" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 33.490s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 934.500550] env[62627]: DEBUG oslo_vmware.api [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Task: {'id': task-2194572, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.61674} completed successfully.
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.500743] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore1] 4e65e39e-3b0b-4c71-87d7-e1ed663ada4a/4e65e39e-3b0b-4c71-87d7-e1ed663ada4a.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 934.500958] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] [instance: 4e65e39e-3b0b-4c71-87d7-e1ed663ada4a] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 934.501223] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3e57c655-b9ca-4be5-9ea4-be193dd8d3d6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.507300] env[62627]: DEBUG oslo_vmware.api [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Waiting for the task: (returnval){ [ 934.507300] env[62627]: value = "task-2194573" [ 934.507300] env[62627]: _type = "Task" [ 934.507300] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.516868] env[62627]: DEBUG oslo_vmware.api [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Task: {'id': task-2194573, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.763350] env[62627]: DEBUG nova.network.neutron [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Updating instance_info_cache with network_info: [{"id": "a7d230b4-8ae5-4c90-8ccf-806a4056e883", "address": "fa:16:3e:4c:84:35", "network": {"id": "bd876f46-2bdf-4196-b718-c36f7e15f150", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.153", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ccb0cafa1a904d578ec4998b386bebf7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae1d6a8-cbba-4bbf-af10-ba5467340475", "external-id": "nsx-vlan-transportzone-271", "segmentation_id": 271, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7d230b4-8a", "ovs_interfaceid": "a7d230b4-8ae5-4c90-8ccf-806a4056e883", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 934.768024] env[62627]: DEBUG oslo_concurrency.lockutils [None req-da634f2f-99d7-4268-b120-9667049234eb tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Lock "38ab16cc-be4f-47b4-a818-ea864e853872" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 47.249s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 934.943024] env[62627]: INFO nova.compute.manager [None req-048bd3cb-f9ad-4be1-bab6-addfc949e805 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] Took 0.55 seconds to detach 1 volumes for instance. [ 935.023262] env[62627]: DEBUG oslo_vmware.api [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Task: {'id': task-2194573, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.307378} completed successfully. 
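[editor's aside] The instance_info_cache entry logged above is a list of VIF dicts. As a self-contained illustration of that structure (trimmed to the fields of interest, with values copied from the record for instance ab999825-4310-4fd8-81f0-6bb5dde4175b), this sketch pulls the device name, MAC and fixed IPs out of such an entry; it is a reading aid, not Nova code.

# Illustrative sketch only: reading the interesting fields out of a cached
# network_info entry shaped like the one logged above (heavily trimmed).
network_info = [{
    'id': 'a7d230b4-8ae5-4c90-8ccf-806a4056e883',
    'address': 'fa:16:3e:4c:84:35',
    'devname': 'tapa7d230b4-8a',
    'type': 'ovs',
    'network': {
        'label': 'shared',
        'subnets': [{
            'cidr': '192.168.233.0/24',
            'gateway': {'address': '192.168.233.1'},
            'ips': [{'address': '192.168.233.153', 'type': 'fixed'}],
        }],
    },
}]

for vif in network_info:
    fixed_ips = [ip['address']
                 for subnet in vif['network']['subnets']
                 for ip in subnet['ips'] if ip['type'] == 'fixed']
    print(vif['devname'], vif['address'], fixed_ips)

[end of aside]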
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.023904] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] [instance: 4e65e39e-3b0b-4c71-87d7-e1ed663ada4a] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 935.024237] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9fc1768-e7be-4326-989c-d27f411cc588 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.047986] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] [instance: 4e65e39e-3b0b-4c71-87d7-e1ed663ada4a] Reconfiguring VM instance instance-00000036 to attach disk [datastore1] 4e65e39e-3b0b-4c71-87d7-e1ed663ada4a/4e65e39e-3b0b-4c71-87d7-e1ed663ada4a.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 935.051243] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-68a514b1-e211-4e9d-864a-58da1bf55cda {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.071178] env[62627]: DEBUG oslo_vmware.api [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Waiting for the task: (returnval){ [ 935.071178] env[62627]: value = "task-2194574" [ 935.071178] env[62627]: _type = "Task" [ 935.071178] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.079513] env[62627]: INFO nova.compute.manager [None req-b726f19c-2204-488d-bcb0-979bb24ebb6f tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Rescuing [ 935.079751] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b726f19c-2204-488d-bcb0-979bb24ebb6f tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Acquiring lock "refresh_cache-38ab16cc-be4f-47b4-a818-ea864e853872" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 935.079903] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b726f19c-2204-488d-bcb0-979bb24ebb6f tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Acquired lock "refresh_cache-38ab16cc-be4f-47b4-a818-ea864e853872" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 935.080081] env[62627]: DEBUG nova.network.neutron [None req-b726f19c-2204-488d-bcb0-979bb24ebb6f tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 935.081224] env[62627]: DEBUG oslo_vmware.api [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Task: {'id': task-2194574, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.171346] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e13aba2-9feb-4555-8a08-a61ec4d348d8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.190514] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Updating instance '892fc84b-02a9-4fa0-81b5-80326a060c64' progress to 0 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 935.266065] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Releasing lock "refresh_cache-ab999825-4310-4fd8-81f0-6bb5dde4175b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 935.266271] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Updated the network info_cache for instance {{(pid=62627) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10516}} [ 935.266453] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 935.268618] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task 
ComputeManager._poll_rescued_instances {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 935.268618] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 935.268618] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 935.268618] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 935.268618] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 935.268618] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62627) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 935.268618] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 935.271381] env[62627]: DEBUG nova.compute.manager [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 0de040e4-8896-4f02-9284-f68100254631] Starting instance... 
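[editor's aside] The run above is the compute manager's periodic task loop firing its registered tasks in turn (_poll_rebooting_instances, _poll_volume_usage, update_available_resource, and so on). As a generic, hedged sketch of how such tasks are declared with oslo.service, and not Nova's actual manager class, the example below registers one periodic task and runs whatever is currently due.

# Illustrative sketch only: declaring and running periodic tasks with
# oslo.service, the machinery behind the "Running periodic task ..." records.
from oslo_config import cfg
from oslo_service import periodic_task

CONF = cfg.CONF
CONF([])  # initialize the config object with defaults, no CLI args

class DemoManager(periodic_task.PeriodicTasks):
    def __init__(self):
        super().__init__(CONF)

    @periodic_task.periodic_task(spacing=60)   # run at most once a minute
    def _poll_volume_usage(self, context):
        print('polling volume usage (hypothetical work)')

manager = DemoManager()
# A service normally calls this from a timer loop; the context is passed
# through to every task that is due to run.
manager.run_periodic_tasks(context=None)

[end of aside]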
{{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 935.447119] env[62627]: DEBUG oslo_concurrency.lockutils [None req-048bd3cb-f9ad-4be1-bab6-addfc949e805 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 935.528461] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32095267-0a80-44dd-986e-934873289d3a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.537405] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15734600-b7ae-468b-8c30-03c989eac1af {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.571596] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-187b5ea0-4c17-4bb0-a4ce-e49303cc5f13 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.584900] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4c0d688-f958-4eb6-956c-21afd6db2c6d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.588564] env[62627]: DEBUG oslo_vmware.api [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Task: {'id': task-2194574, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.600810] env[62627]: DEBUG nova.compute.provider_tree [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 935.699216] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 935.699528] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2a741c1a-698b-478c-a37d-22ecdafc7ae1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.708985] env[62627]: DEBUG oslo_vmware.api [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Waiting for the task: (returnval){ [ 935.708985] env[62627]: value = "task-2194575" [ 935.708985] env[62627]: _type = "Task" [ 935.708985] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.718497] env[62627]: DEBUG oslo_vmware.api [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2194575, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.770433] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 935.794504] env[62627]: DEBUG oslo_concurrency.lockutils [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 935.944860] env[62627]: DEBUG nova.network.neutron [None req-b726f19c-2204-488d-bcb0-979bb24ebb6f tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Updating instance_info_cache with network_info: [{"id": "e7de9fde-1309-4a6a-bbb5-537333d9e42c", "address": "fa:16:3e:e9:d1:53", "network": {"id": "6868e7f7-6e86-4ef3-a3f2-82b652203315", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-729196523-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "6ad7b328aba44b51aec9455bada6678c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape7de9fde-13", "ovs_interfaceid": "e7de9fde-1309-4a6a-bbb5-537333d9e42c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 936.028045] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f87ae548-ed88-4a62-b117-0d50b6e05974 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Acquiring lock "87bc4af9-7cbc-4797-8aaa-cf5f8ba51218" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 936.028045] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f87ae548-ed88-4a62-b117-0d50b6e05974 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Lock "87bc4af9-7cbc-4797-8aaa-cf5f8ba51218" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 
0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 936.028045] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f87ae548-ed88-4a62-b117-0d50b6e05974 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Acquiring lock "87bc4af9-7cbc-4797-8aaa-cf5f8ba51218-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 936.028623] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f87ae548-ed88-4a62-b117-0d50b6e05974 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Lock "87bc4af9-7cbc-4797-8aaa-cf5f8ba51218-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 936.028811] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f87ae548-ed88-4a62-b117-0d50b6e05974 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Lock "87bc4af9-7cbc-4797-8aaa-cf5f8ba51218-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 936.030735] env[62627]: INFO nova.compute.manager [None req-f87ae548-ed88-4a62-b117-0d50b6e05974 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218] Terminating instance [ 936.086647] env[62627]: DEBUG oslo_vmware.api [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Task: {'id': task-2194574, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.105350] env[62627]: DEBUG nova.scheduler.client.report [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 936.223460] env[62627]: DEBUG oslo_vmware.api [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2194575, 'name': PowerOffVM_Task, 'duration_secs': 0.332556} completed successfully.
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.223739] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 936.223878] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Updating instance '892fc84b-02a9-4fa0-81b5-80326a060c64' progress to 17 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 936.448594] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b726f19c-2204-488d-bcb0-979bb24ebb6f tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Releasing lock "refresh_cache-38ab16cc-be4f-47b4-a818-ea864e853872" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 936.535258] env[62627]: DEBUG nova.compute.manager [None req-f87ae548-ed88-4a62-b117-0d50b6e05974 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218] Start destroying the instance on the hypervisor. {{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 936.536118] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-f87ae548-ed88-4a62-b117-0d50b6e05974 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 936.536939] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd7cb364-b79a-4653-8c67-2e326f9f7a02 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.545222] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-f87ae548-ed88-4a62-b117-0d50b6e05974 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 936.545588] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d4cb9dbd-88b5-4b87-b91c-d72918322842 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.552811] env[62627]: DEBUG oslo_vmware.api [None req-f87ae548-ed88-4a62-b117-0d50b6e05974 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for the task: (returnval){ [ 936.552811] env[62627]: value = "task-2194576" [ 936.552811] env[62627]: _type = "Task" [ 936.552811] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.561089] env[62627]: DEBUG oslo_vmware.api [None req-f87ae548-ed88-4a62-b117-0d50b6e05974 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194576, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.585684] env[62627]: DEBUG oslo_vmware.api [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Task: {'id': task-2194574, 'name': ReconfigVM_Task, 'duration_secs': 1.037652} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.585962] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] [instance: 4e65e39e-3b0b-4c71-87d7-e1ed663ada4a] Reconfigured VM instance instance-00000036 to attach disk [datastore1] 4e65e39e-3b0b-4c71-87d7-e1ed663ada4a/4e65e39e-3b0b-4c71-87d7-e1ed663ada4a.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 936.586724] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-79141ba6-a0e0-42e5-907c-9ada54d38382 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.592611] env[62627]: DEBUG oslo_vmware.api [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Waiting for the task: (returnval){ [ 936.592611] env[62627]: value = "task-2194577" [ 936.592611] env[62627]: _type = "Task" [ 936.592611] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.600830] env[62627]: DEBUG oslo_vmware.api [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Task: {'id': task-2194577, 'name': Rename_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.611874] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.743s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 936.613123] env[62627]: DEBUG oslo_concurrency.lockutils [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.962s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 936.614705] env[62627]: INFO nova.compute.claims [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: 5d288606-02b6-4d11-b285-7325e48fb5b3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 936.735485] env[62627]: DEBUG nova.virt.hardware [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 936.735736] env[62627]: DEBUG nova.virt.hardware [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 936.735840] env[62627]: DEBUG nova.virt.hardware [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 936.736018] env[62627]: DEBUG nova.virt.hardware [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 936.736194] env[62627]: DEBUG nova.virt.hardware [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 936.736363] env[62627]: DEBUG nova.virt.hardware [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 
tempest-MigrationsAdminTest-1384528618-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 936.736590] env[62627]: DEBUG nova.virt.hardware [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 936.737107] env[62627]: DEBUG nova.virt.hardware [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 936.737107] env[62627]: DEBUG nova.virt.hardware [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 936.737107] env[62627]: DEBUG nova.virt.hardware [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 936.737248] env[62627]: DEBUG nova.virt.hardware [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 936.743527] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-95563240-2e5b-4ec1-9e1e-4047b616fc37 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.759842] env[62627]: DEBUG oslo_vmware.api [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Waiting for the task: (returnval){ [ 936.759842] env[62627]: value = "task-2194578" [ 936.759842] env[62627]: _type = "Task" [ 936.759842] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.770720] env[62627]: DEBUG oslo_vmware.api [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2194578, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.062714] env[62627]: DEBUG oslo_vmware.api [None req-f87ae548-ed88-4a62-b117-0d50b6e05974 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194576, 'name': PowerOffVM_Task, 'duration_secs': 0.176675} completed successfully. 
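Editor's note: the nova.virt.hardware entries above enumerate candidate CPU topologies for a 1-vCPU flavor with no flavor or image limits (preferred 0:0:0, maxima 65536 each) and end with the single candidate sockets=1, cores=1, threads=1. A simplified stand-in for that enumeration (not Nova's actual implementation in hardware.py):

    # Simplified stand-in for the enumeration logged by
    # _get_possible_cpu_topologies; Nova's real logic lives in nova/virt/hardware.py.
    import itertools

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        found = []
        for sockets, cores, threads in itertools.product(
                range(1, min(vcpus, max_sockets) + 1),
                range(1, min(vcpus, max_cores) + 1),
                range(1, min(vcpus, max_threads) + 1)):
            if sockets * cores * threads == vcpus:
                found.append((sockets, cores, threads))
        return found

    # For the 1-vCPU flavors in this run the only candidate is (1, 1, 1),
    # i.e. VirtCPUTopology(cores=1,sockets=1,threads=1) as logged above.
    print(possible_topologies(1))   # [(1, 1, 1)]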
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.063076] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-f87ae548-ed88-4a62-b117-0d50b6e05974 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 937.063208] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-f87ae548-ed88-4a62-b117-0d50b6e05974 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 937.063457] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-27771128-5717-433c-b176-021003c6a51c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.102127] env[62627]: DEBUG oslo_vmware.api [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Task: {'id': task-2194577, 'name': Rename_Task, 'duration_secs': 0.154964} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.102546] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] [instance: 4e65e39e-3b0b-4c71-87d7-e1ed663ada4a] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 937.103050] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ed1202a7-3bf2-4bfc-9e0e-7336f0b7a72e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.109210] env[62627]: DEBUG oslo_vmware.api [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Waiting for the task: (returnval){ [ 937.109210] env[62627]: value = "task-2194580" [ 937.109210] env[62627]: _type = "Task" [ 937.109210] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.117122] env[62627]: DEBUG oslo_vmware.api [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Task: {'id': task-2194580, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.118862] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Acquiring lock "13c3f062-2cfb-48b3-8abb-d78fe5eb40f6" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 937.119101] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Lock "13c3f062-2cfb-48b3-8abb-d78fe5eb40f6" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 937.126837] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-f87ae548-ed88-4a62-b117-0d50b6e05974 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 937.127055] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-f87ae548-ed88-4a62-b117-0d50b6e05974 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218] Deleting contents of the VM from datastore datastore1 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 937.127238] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-f87ae548-ed88-4a62-b117-0d50b6e05974 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Deleting the datastore file [datastore1] 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 937.127718] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ff36616e-bdae-4f00-b255-c2f90ad812a2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.137268] env[62627]: DEBUG oslo_vmware.api [None req-f87ae548-ed88-4a62-b117-0d50b6e05974 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for the task: (returnval){ [ 937.137268] env[62627]: value = "task-2194581" [ 937.137268] env[62627]: _type = "Task" [ 937.137268] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.148017] env[62627]: DEBUG oslo_vmware.api [None req-f87ae548-ed88-4a62-b117-0d50b6e05974 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194581, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.270587] env[62627]: DEBUG oslo_vmware.api [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2194578, 'name': ReconfigVM_Task, 'duration_secs': 0.222744} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.270902] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Updating instance '892fc84b-02a9-4fa0-81b5-80326a060c64' progress to 33 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 937.621737] env[62627]: DEBUG oslo_vmware.api [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Task: {'id': task-2194580, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.623392] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Lock "13c3f062-2cfb-48b3-8abb-d78fe5eb40f6" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.504s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 937.623895] env[62627]: DEBUG nova.compute.manager [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] [instance: c560b907-500b-42fd-851b-61a04472b660] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 937.646965] env[62627]: DEBUG oslo_vmware.api [None req-f87ae548-ed88-4a62-b117-0d50b6e05974 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194581, 'name': DeleteDatastoreFile_Task} progress is 0%. 
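Editor's note: the "Acquiring lock ... / acquired ... waited Ns / released ... held Ns" entries (compute_resources, the instance-group lock above) are emitted by oslo.concurrency's lock wrapper. A minimal sketch of the same pattern; the lock name matches the log, the guarded function is made up for illustration:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def claim_resources(instance_uuid):
        # Runs with the in-process "compute_resources" lock held, the same way
        # ResourceTracker.instance_claim is serialized in the entries above.
        return 'claimed %s' % instance_uuid

    # Equivalent context-manager form for ad-hoc critical sections.
    with lockutils.lock('compute_resources'):
        pass

    print(claim_resources('5d288606-02b6-4d11-b285-7325e48fb5b3'))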
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.777640] env[62627]: DEBUG nova.virt.hardware [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:32:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='477bc599-8618-4d09-a5b7-e98bb785a942',id=38,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1076459801',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 937.777931] env[62627]: DEBUG nova.virt.hardware [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 937.778369] env[62627]: DEBUG nova.virt.hardware [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 937.778633] env[62627]: DEBUG nova.virt.hardware [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 937.778790] env[62627]: DEBUG nova.virt.hardware [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 937.779014] env[62627]: DEBUG nova.virt.hardware [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 937.779540] env[62627]: DEBUG nova.virt.hardware [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 937.779765] env[62627]: DEBUG nova.virt.hardware [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 937.779953] env[62627]: DEBUG nova.virt.hardware [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] 
Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 937.780136] env[62627]: DEBUG nova.virt.hardware [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 937.780585] env[62627]: DEBUG nova.virt.hardware [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 937.788589] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Reconfiguring VM instance instance-0000002d to detach disk 2000 {{(pid=62627) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 937.791750] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-35e287b2-be8e-4df5-99e1-2bb8e66a57c6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.813224] env[62627]: DEBUG oslo_vmware.api [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Waiting for the task: (returnval){ [ 937.813224] env[62627]: value = "task-2194582" [ 937.813224] env[62627]: _type = "Task" [ 937.813224] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.826059] env[62627]: DEBUG oslo_vmware.api [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2194582, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.991722] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-b726f19c-2204-488d-bcb0-979bb24ebb6f tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 937.992860] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7937304c-d1d7-4f80-aaec-2899558f0af4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.999153] env[62627]: DEBUG oslo_vmware.api [None req-b726f19c-2204-488d-bcb0-979bb24ebb6f tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Waiting for the task: (returnval){ [ 937.999153] env[62627]: value = "task-2194583" [ 937.999153] env[62627]: _type = "Task" [ 937.999153] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.009094] env[62627]: DEBUG oslo_vmware.api [None req-b726f19c-2204-488d-bcb0-979bb24ebb6f tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194583, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.119150] env[62627]: DEBUG oslo_vmware.api [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Task: {'id': task-2194580, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.133124] env[62627]: DEBUG nova.compute.utils [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 938.133124] env[62627]: DEBUG nova.compute.manager [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] [instance: c560b907-500b-42fd-851b-61a04472b660] Allocating IP information in the background. {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 938.133124] env[62627]: DEBUG nova.network.neutron [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] [instance: c560b907-500b-42fd-851b-61a04472b660] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 938.154114] env[62627]: DEBUG oslo_vmware.api [None req-f87ae548-ed88-4a62-b117-0d50b6e05974 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194581, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.683585} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.154997] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-f87ae548-ed88-4a62-b117-0d50b6e05974 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 938.155326] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-f87ae548-ed88-4a62-b117-0d50b6e05974 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218] Deleted contents of the VM from datastore datastore1 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 938.155541] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-f87ae548-ed88-4a62-b117-0d50b6e05974 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 938.155718] env[62627]: INFO nova.compute.manager [None req-f87ae548-ed88-4a62-b117-0d50b6e05974 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218] Took 1.62 seconds to destroy the instance on the hypervisor. [ 938.155957] env[62627]: DEBUG oslo.service.loopingcall [None req-f87ae548-ed88-4a62-b117-0d50b6e05974 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 938.156940] env[62627]: DEBUG nova.compute.manager [-] [instance: 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 938.157043] env[62627]: DEBUG nova.network.neutron [-] [instance: 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 938.174213] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d091e1f7-c119-4a34-878d-4e7804c3f1d8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.182223] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5041c4ef-fbff-45d8-a093-997ce79d7c8e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.217206] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c0b7e4d-27f2-4565-9a6e-f4178908d25e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.221706] env[62627]: DEBUG nova.policy [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2cfa66e8ca06494f9f967106fae55ca1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 
'3d601ae06cf24ebca380f67af2425d85', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 938.227976] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81ea52b0-a825-43e0-a138-608e658a87b2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.243799] env[62627]: DEBUG nova.compute.provider_tree [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 938.324274] env[62627]: DEBUG oslo_vmware.api [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2194582, 'name': ReconfigVM_Task, 'duration_secs': 0.244408} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.324274] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Reconfigured VM instance instance-0000002d to detach disk 2000 {{(pid=62627) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 938.325228] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b109f9c2-019c-4757-a63d-ccc5f7c9b072 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.350687] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Reconfiguring VM instance instance-0000002d to attach disk [datastore1] 892fc84b-02a9-4fa0-81b5-80326a060c64/892fc84b-02a9-4fa0-81b5-80326a060c64.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 938.351507] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c2b8d2a3-1283-40f3-ba16-bd253def0907 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.369929] env[62627]: DEBUG oslo_vmware.api [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Waiting for the task: (returnval){ [ 938.369929] env[62627]: value = "task-2194584" [ 938.369929] env[62627]: _type = "Task" [ 938.369929] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.379295] env[62627]: DEBUG oslo_vmware.api [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2194584, 'name': ReconfigVM_Task} progress is 6%. 
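Editor's note: the "Policy check for network:attach_external_network failed with credentials {...}" entry above is a non-fatal oslo.policy check: the member/reader token is not allowed to attach external networks, so the check is only logged and returns False. A rough sketch of that kind of check; the 'role:admin' default rule is an assumption for the example, not Nova's actual rule definition:

    from oslo_config import cfg
    from oslo_policy import policy

    enforcer = policy.Enforcer(cfg.CONF)
    enforcer.register_default(
        policy.RuleDefault('network:attach_external_network', 'role:admin'))

    creds = {'roles': ['reader', 'member'],
             'project_id': '3d601ae06cf24ebca380f67af2425d85'}
    target = {'project_id': creds['project_id']}

    # do_raise=False mirrors a non-fatal check: failure is reported back to
    # the caller (and logged) rather than raised as an exception.
    print(enforcer.enforce('network:attach_external_network',
                           target, creds, do_raise=False))   # False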
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.508579] env[62627]: DEBUG oslo_vmware.api [None req-b726f19c-2204-488d-bcb0-979bb24ebb6f tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194583, 'name': PowerOffVM_Task, 'duration_secs': 0.15361} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.508868] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-b726f19c-2204-488d-bcb0-979bb24ebb6f tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 938.509697] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7b42714-245a-4693-9dff-0269a38f298b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.514796] env[62627]: DEBUG nova.compute.manager [req-d9f6eef0-2ca0-4858-8757-5829fa3671c6 req-7281b69a-e6a6-4f51-9769-057cb32bdd43 service nova] [instance: 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218] Received event network-vif-deleted-dca7e38e-83c7-4679-824f-f98d9fc325ad {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 938.514796] env[62627]: INFO nova.compute.manager [req-d9f6eef0-2ca0-4858-8757-5829fa3671c6 req-7281b69a-e6a6-4f51-9769-057cb32bdd43 service nova] [instance: 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218] Neutron deleted interface dca7e38e-83c7-4679-824f-f98d9fc325ad; detaching it from the instance and deleting it from the info cache [ 938.514796] env[62627]: DEBUG nova.network.neutron [req-d9f6eef0-2ca0-4858-8757-5829fa3671c6 req-7281b69a-e6a6-4f51-9769-057cb32bdd43 service nova] [instance: 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 938.532077] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca81ce29-242c-4beb-9fff-0cb7bdec9317 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.569168] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-b726f19c-2204-488d-bcb0-979bb24ebb6f tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 938.569454] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fe6861cc-c3cb-400d-bfdd-a769cf2a72e0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.575960] env[62627]: DEBUG oslo_vmware.api [None req-b726f19c-2204-488d-bcb0-979bb24ebb6f tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Waiting for the task: (returnval){ [ 938.575960] env[62627]: value = "task-2194585" [ 938.575960] env[62627]: _type = "Task" [ 938.575960] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.583867] env[62627]: DEBUG oslo_vmware.api [None req-b726f19c-2204-488d-bcb0-979bb24ebb6f tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194585, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.623973] env[62627]: DEBUG oslo_vmware.api [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Task: {'id': task-2194580, 'name': PowerOnVM_Task, 'duration_secs': 1.329389} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.623973] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] [instance: 4e65e39e-3b0b-4c71-87d7-e1ed663ada4a] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 938.623973] env[62627]: INFO nova.compute.manager [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] [instance: 4e65e39e-3b0b-4c71-87d7-e1ed663ada4a] Took 11.88 seconds to spawn the instance on the hypervisor. [ 938.623973] env[62627]: DEBUG nova.compute.manager [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] [instance: 4e65e39e-3b0b-4c71-87d7-e1ed663ada4a] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 938.623973] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e876533-9c5d-4267-ba19-f0bfc841dd86 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.636037] env[62627]: DEBUG nova.compute.manager [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] [instance: c560b907-500b-42fd-851b-61a04472b660] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 938.748263] env[62627]: DEBUG nova.scheduler.client.report [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 938.882094] env[62627]: DEBUG oslo_vmware.api [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2194584, 'name': ReconfigVM_Task, 'duration_secs': 0.259917} completed successfully. 
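Editor's note: the inventory dict logged above for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 carries the allocation ratios and reserves placement works from. As a worked example, applying the standard placement capacity formula (total - reserved) * allocation_ratio to those figures:

    # Worked example with the figures from the log entry above; max_unit
    # additionally caps any single allocation.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 176},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print('%s: capacity %.0f, max_unit %d' % (rc, capacity, inv['max_unit']))
    # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400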
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.882456] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Reconfigured VM instance instance-0000002d to attach disk [datastore1] 892fc84b-02a9-4fa0-81b5-80326a060c64/892fc84b-02a9-4fa0-81b5-80326a060c64.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 938.882843] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Updating instance '892fc84b-02a9-4fa0-81b5-80326a060c64' progress to 50 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 938.974934] env[62627]: DEBUG nova.network.neutron [-] [instance: 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 939.032571] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a51a6366-012e-4c4e-9d54-db17f8d862b1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.041781] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f53b5140-9055-4ca0-b8b5-a78d555b7e10 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.056521] env[62627]: DEBUG nova.network.neutron [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] [instance: c560b907-500b-42fd-851b-61a04472b660] Successfully created port: 1edb8f37-0f69-4fa7-9649-7a07dad18ee3 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 939.083497] env[62627]: DEBUG nova.compute.manager [req-d9f6eef0-2ca0-4858-8757-5829fa3671c6 req-7281b69a-e6a6-4f51-9769-057cb32bdd43 service nova] [instance: 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218] Detach interface failed, port_id=dca7e38e-83c7-4679-824f-f98d9fc325ad, reason: Instance 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218 could not be found. 
{{(pid=62627) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 939.095730] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-b726f19c-2204-488d-bcb0-979bb24ebb6f tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] VM already powered off {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 939.095979] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-b726f19c-2204-488d-bcb0-979bb24ebb6f tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 939.096250] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b726f19c-2204-488d-bcb0-979bb24ebb6f tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 939.096444] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b726f19c-2204-488d-bcb0-979bb24ebb6f tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 939.096573] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-b726f19c-2204-488d-bcb0-979bb24ebb6f tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 939.096882] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-64ffbc02-f986-4871-8d80-fb326cc8afea {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.107995] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-b726f19c-2204-488d-bcb0-979bb24ebb6f tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 939.108223] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-b726f19c-2204-488d-bcb0-979bb24ebb6f tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 939.109057] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c16d8f29-a333-4385-baf9-eb7d719df069 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.116899] env[62627]: DEBUG oslo_vmware.api [None req-b726f19c-2204-488d-bcb0-979bb24ebb6f tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Waiting for the task: (returnval){ [ 939.116899] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]528c4ddf-4d77-ff7e-af43-928aefc5de8b" [ 939.116899] env[62627]: _type = "Task" [ 939.116899] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.127135] env[62627]: DEBUG oslo_vmware.api [None req-b726f19c-2204-488d-bcb0-979bb24ebb6f tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]528c4ddf-4d77-ff7e-af43-928aefc5de8b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.139012] env[62627]: INFO nova.compute.manager [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] [instance: 4e65e39e-3b0b-4c71-87d7-e1ed663ada4a] Took 37.90 seconds to build instance. [ 939.256097] env[62627]: DEBUG oslo_concurrency.lockutils [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.643s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 939.256609] env[62627]: DEBUG nova.compute.manager [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: 5d288606-02b6-4d11-b285-7325e48fb5b3] Start building networks asynchronously for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 939.259565] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.088s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 939.261100] env[62627]: INFO nova.compute.claims [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] [instance: ae4ec050-5062-4e57-8090-f67f8c8013ff] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 939.390294] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a743ed8-b000-434e-9939-4963466345bf {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.410219] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a9d7470-5346-4e8b-869a-fa595f504558 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.427959] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Updating instance '892fc84b-02a9-4fa0-81b5-80326a060c64' progress to 67 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 939.477135] env[62627]: INFO nova.compute.manager [-] [instance: 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218] Took 1.32 seconds to deallocate network for instance. [ 939.629245] env[62627]: DEBUG oslo_vmware.api [None req-b726f19c-2204-488d-bcb0-979bb24ebb6f tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]528c4ddf-4d77-ff7e-af43-928aefc5de8b, 'name': SearchDatastore_Task, 'duration_secs': 0.030662} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.630065] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-12665ae6-244d-4d19-a8ad-5973798efe43 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.637615] env[62627]: DEBUG oslo_vmware.api [None req-b726f19c-2204-488d-bcb0-979bb24ebb6f tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Waiting for the task: (returnval){ [ 939.637615] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52583666-4869-00e2-8776-de7244b354aa" [ 939.637615] env[62627]: _type = "Task" [ 939.637615] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.647821] env[62627]: DEBUG oslo_concurrency.lockutils [None req-36f813a5-8971-4cb9-953e-e3c96a530892 tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Lock "4e65e39e-3b0b-4c71-87d7-e1ed663ada4a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 51.749s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 939.648821] env[62627]: DEBUG nova.compute.manager [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] [instance: c560b907-500b-42fd-851b-61a04472b660] Start spawning the instance on the hypervisor. {{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 939.650670] env[62627]: DEBUG oslo_vmware.api [None req-b726f19c-2204-488d-bcb0-979bb24ebb6f tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52583666-4869-00e2-8776-de7244b354aa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.676604] env[62627]: DEBUG nova.virt.hardware [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 939.676808] env[62627]: DEBUG nova.virt.hardware [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 939.676962] env[62627]: DEBUG nova.virt.hardware [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 939.677155] env[62627]: DEBUG nova.virt.hardware [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 939.677299] env[62627]: DEBUG nova.virt.hardware [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] 
Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 939.677441] env[62627]: DEBUG nova.virt.hardware [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 939.677650] env[62627]: DEBUG nova.virt.hardware [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 939.677807] env[62627]: DEBUG nova.virt.hardware [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 939.677971] env[62627]: DEBUG nova.virt.hardware [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 939.678146] env[62627]: DEBUG nova.virt.hardware [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 939.678319] env[62627]: DEBUG nova.virt.hardware [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 939.679176] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd6967dd-1ab9-49b0-93c1-967c6d43815a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.686475] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ec1bc14-efa3-4eac-b656-9dbb5f2d8601 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.762034] env[62627]: DEBUG nova.compute.utils [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 939.764402] env[62627]: DEBUG nova.compute.manager [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: 5d288606-02b6-4d11-b285-7325e48fb5b3] Allocating IP information in the background. 
{{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 939.764713] env[62627]: DEBUG nova.network.neutron [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: 5d288606-02b6-4d11-b285-7325e48fb5b3] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 939.851340] env[62627]: DEBUG nova.policy [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ba14552786e54a559118919720c8b61e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd97a4a01696d4c4884df85a475c4bb7b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 939.985758] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f87ae548-ed88-4a62-b117-0d50b6e05974 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 940.012027] env[62627]: DEBUG nova.network.neutron [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Port 4a5f7c0b-f029-481c-a96f-0d9789d6bad5 binding to destination host cpu-1 is already ACTIVE {{(pid=62627) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 940.081289] env[62627]: DEBUG nova.compute.manager [None req-efc79990-0be0-4b69-aa66-cb488764f4d2 tempest-ServerDiagnosticsTest-1737979326 tempest-ServerDiagnosticsTest-1737979326-project-admin] [instance: 4e65e39e-3b0b-4c71-87d7-e1ed663ada4a] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 940.082807] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4397b4f-f507-4413-89f8-6e8e363faaad {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.090528] env[62627]: INFO nova.compute.manager [None req-efc79990-0be0-4b69-aa66-cb488764f4d2 tempest-ServerDiagnosticsTest-1737979326 tempest-ServerDiagnosticsTest-1737979326-project-admin] [instance: 4e65e39e-3b0b-4c71-87d7-e1ed663ada4a] Retrieving diagnostics [ 940.090528] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-066722fb-4d45-45a4-bc09-c5efb7adeb57 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.148714] env[62627]: DEBUG oslo_vmware.api [None req-b726f19c-2204-488d-bcb0-979bb24ebb6f tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52583666-4869-00e2-8776-de7244b354aa, 'name': SearchDatastore_Task, 'duration_secs': 0.022566} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.148999] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b726f19c-2204-488d-bcb0-979bb24ebb6f tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 940.149263] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-b726f19c-2204-488d-bcb0-979bb24ebb6f tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 38ab16cc-be4f-47b4-a818-ea864e853872/d5497cf1-16e8-478a-a834-e9289b1751b7-rescue.vmdk. {{(pid=62627) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 940.149521] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3c2d6a16-5225-4ea8-862f-19864bb57513 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.156230] env[62627]: DEBUG oslo_vmware.api [None req-b726f19c-2204-488d-bcb0-979bb24ebb6f tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Waiting for the task: (returnval){ [ 940.156230] env[62627]: value = "task-2194586" [ 940.156230] env[62627]: _type = "Task" [ 940.156230] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.165336] env[62627]: DEBUG oslo_vmware.api [None req-b726f19c-2204-488d-bcb0-979bb24ebb6f tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194586, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.269917] env[62627]: DEBUG nova.compute.manager [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: 5d288606-02b6-4d11-b285-7325e48fb5b3] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 940.276396] env[62627]: DEBUG nova.network.neutron [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: 5d288606-02b6-4d11-b285-7325e48fb5b3] Successfully created port: 383a9402-570b-4677-b3fd-f0854a5bb40f {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 940.673255] env[62627]: DEBUG oslo_vmware.api [None req-b726f19c-2204-488d-bcb0-979bb24ebb6f tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194586, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.486228} completed successfully. 
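Editor's note: the "[datastore2] devstack-image-cache_base/<image-id>/<image-id>.vmdk" strings in the SearchDatastore and CopyVirtualDisk entries above are vCenter datastore paths. A small sketch of how such a path can be assembled, assuming oslo.vmware's DatastorePath helper; the image id is the one from the log, the rest is illustrative:

    from oslo_vmware.objects.datastore import DatastorePath

    image_id = 'd5497cf1-16e8-478a-a834-e9289b1751b7'
    cache_vmdk = DatastorePath('datastore2', 'devstack-image-cache_base',
                               image_id, image_id + '.vmdk')
    print(str(cache_vmdk))
    # [datastore2] devstack-image-cache_base/d5497.../d5497....vmdk
    print(cache_vmdk.rel_path)
    # devstack-image-cache_base/d5497.../d5497....vmdk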
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.676538] env[62627]: INFO nova.virt.vmwareapi.ds_util [None req-b726f19c-2204-488d-bcb0-979bb24ebb6f tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 38ab16cc-be4f-47b4-a818-ea864e853872/d5497cf1-16e8-478a-a834-e9289b1751b7-rescue.vmdk. [ 940.677126] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-433d1998-ebcb-4289-8954-c0e8c0232cdc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.708871] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-b726f19c-2204-488d-bcb0-979bb24ebb6f tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Reconfiguring VM instance instance-00000035 to attach disk [datastore2] 38ab16cc-be4f-47b4-a818-ea864e853872/d5497cf1-16e8-478a-a834-e9289b1751b7-rescue.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 940.710284] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c5e46bda-95fb-4d53-99bb-f1e91d459610 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.735014] env[62627]: DEBUG oslo_vmware.api [None req-b726f19c-2204-488d-bcb0-979bb24ebb6f tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Waiting for the task: (returnval){ [ 940.735014] env[62627]: value = "task-2194587" [ 940.735014] env[62627]: _type = "Task" [ 940.735014] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.750754] env[62627]: DEBUG oslo_vmware.api [None req-b726f19c-2204-488d-bcb0-979bb24ebb6f tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194587, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.866403] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beff1b08-22f6-44da-ad6e-2437cd69ec6c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.875663] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a80ec68a-9693-466f-8962-5ecc616d9b27 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.912989] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-511e5b02-8d98-4b5a-963c-4c48939217d7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.921298] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-698f550c-7547-4bb3-80b6-225879422acc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.936915] env[62627]: DEBUG nova.compute.provider_tree [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 941.041202] env[62627]: DEBUG oslo_concurrency.lockutils [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Acquiring lock "892fc84b-02a9-4fa0-81b5-80326a060c64-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 941.041674] env[62627]: DEBUG oslo_concurrency.lockutils [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Lock "892fc84b-02a9-4fa0-81b5-80326a060c64-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 941.041907] env[62627]: DEBUG oslo_concurrency.lockutils [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Lock "892fc84b-02a9-4fa0-81b5-80326a060c64-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 941.075529] env[62627]: DEBUG nova.network.neutron [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] [instance: c560b907-500b-42fd-851b-61a04472b660] Successfully updated port: 1edb8f37-0f69-4fa7-9649-7a07dad18ee3 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 941.116654] env[62627]: DEBUG nova.compute.manager [req-19a42120-4f4a-4297-831e-e8abc5dae98c req-a3f304f0-d6b4-45af-b3e4-1d31d2168179 service nova] [instance: c560b907-500b-42fd-851b-61a04472b660] Received event 
network-vif-plugged-1edb8f37-0f69-4fa7-9649-7a07dad18ee3 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 941.116864] env[62627]: DEBUG oslo_concurrency.lockutils [req-19a42120-4f4a-4297-831e-e8abc5dae98c req-a3f304f0-d6b4-45af-b3e4-1d31d2168179 service nova] Acquiring lock "c560b907-500b-42fd-851b-61a04472b660-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 941.117138] env[62627]: DEBUG oslo_concurrency.lockutils [req-19a42120-4f4a-4297-831e-e8abc5dae98c req-a3f304f0-d6b4-45af-b3e4-1d31d2168179 service nova] Lock "c560b907-500b-42fd-851b-61a04472b660-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 941.117318] env[62627]: DEBUG oslo_concurrency.lockutils [req-19a42120-4f4a-4297-831e-e8abc5dae98c req-a3f304f0-d6b4-45af-b3e4-1d31d2168179 service nova] Lock "c560b907-500b-42fd-851b-61a04472b660-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 941.117529] env[62627]: DEBUG nova.compute.manager [req-19a42120-4f4a-4297-831e-e8abc5dae98c req-a3f304f0-d6b4-45af-b3e4-1d31d2168179 service nova] [instance: c560b907-500b-42fd-851b-61a04472b660] No waiting events found dispatching network-vif-plugged-1edb8f37-0f69-4fa7-9649-7a07dad18ee3 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 941.117762] env[62627]: WARNING nova.compute.manager [req-19a42120-4f4a-4297-831e-e8abc5dae98c req-a3f304f0-d6b4-45af-b3e4-1d31d2168179 service nova] [instance: c560b907-500b-42fd-851b-61a04472b660] Received unexpected event network-vif-plugged-1edb8f37-0f69-4fa7-9649-7a07dad18ee3 for instance with vm_state building and task_state spawning. [ 941.248164] env[62627]: DEBUG oslo_vmware.api [None req-b726f19c-2204-488d-bcb0-979bb24ebb6f tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194587, 'name': ReconfigVM_Task, 'duration_secs': 0.29029} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.248458] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-b726f19c-2204-488d-bcb0-979bb24ebb6f tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Reconfigured VM instance instance-00000035 to attach disk [datastore2] 38ab16cc-be4f-47b4-a818-ea864e853872/d5497cf1-16e8-478a-a834-e9289b1751b7-rescue.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 941.249382] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ef5cfe7-0ae3-43e7-ba6c-01426a6c716f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.275519] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3e5412d4-2c6b-4ea7-a4de-219969d1e975 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.286703] env[62627]: DEBUG nova.compute.manager [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: 5d288606-02b6-4d11-b285-7325e48fb5b3] Start spawning the instance on the hypervisor. {{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 941.295311] env[62627]: DEBUG oslo_vmware.api [None req-b726f19c-2204-488d-bcb0-979bb24ebb6f tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Waiting for the task: (returnval){ [ 941.295311] env[62627]: value = "task-2194588" [ 941.295311] env[62627]: _type = "Task" [ 941.295311] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.305969] env[62627]: DEBUG oslo_vmware.api [None req-b726f19c-2204-488d-bcb0-979bb24ebb6f tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194588, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.322209] env[62627]: DEBUG nova.virt.hardware [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 941.322540] env[62627]: DEBUG nova.virt.hardware [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 941.322732] env[62627]: DEBUG nova.virt.hardware [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 941.325610] env[62627]: DEBUG nova.virt.hardware [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 941.325610] env[62627]: DEBUG nova.virt.hardware [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 941.325610] env[62627]: DEBUG nova.virt.hardware [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 941.325610] env[62627]: DEBUG nova.virt.hardware [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 941.325610] env[62627]: DEBUG nova.virt.hardware [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
941.325610] env[62627]: DEBUG nova.virt.hardware [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 941.325610] env[62627]: DEBUG nova.virt.hardware [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 941.325610] env[62627]: DEBUG nova.virt.hardware [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 941.325610] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08a6371a-6968-4e8b-bd42-f62579346555 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.335156] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf14090b-d221-4208-b556-b5a8670c45f0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.364591] env[62627]: DEBUG oslo_concurrency.lockutils [None req-573c58fd-08ae-43be-961d-aafd92d16d8a tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Acquiring lock "4e65e39e-3b0b-4c71-87d7-e1ed663ada4a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 941.364937] env[62627]: DEBUG oslo_concurrency.lockutils [None req-573c58fd-08ae-43be-961d-aafd92d16d8a tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Lock "4e65e39e-3b0b-4c71-87d7-e1ed663ada4a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 941.365172] env[62627]: DEBUG oslo_concurrency.lockutils [None req-573c58fd-08ae-43be-961d-aafd92d16d8a tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Acquiring lock "4e65e39e-3b0b-4c71-87d7-e1ed663ada4a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 941.365541] env[62627]: DEBUG oslo_concurrency.lockutils [None req-573c58fd-08ae-43be-961d-aafd92d16d8a tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Lock "4e65e39e-3b0b-4c71-87d7-e1ed663ada4a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 941.365541] env[62627]: DEBUG oslo_concurrency.lockutils [None req-573c58fd-08ae-43be-961d-aafd92d16d8a tempest-ServerDiagnosticsTest-533968786 
tempest-ServerDiagnosticsTest-533968786-project-member] Lock "4e65e39e-3b0b-4c71-87d7-e1ed663ada4a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 941.367798] env[62627]: INFO nova.compute.manager [None req-573c58fd-08ae-43be-961d-aafd92d16d8a tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] [instance: 4e65e39e-3b0b-4c71-87d7-e1ed663ada4a] Terminating instance [ 941.439949] env[62627]: DEBUG nova.scheduler.client.report [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 941.579653] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Acquiring lock "refresh_cache-c560b907-500b-42fd-851b-61a04472b660" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 941.579653] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Acquired lock "refresh_cache-c560b907-500b-42fd-851b-61a04472b660" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 941.579653] env[62627]: DEBUG nova.network.neutron [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] [instance: c560b907-500b-42fd-851b-61a04472b660] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 941.799380] env[62627]: DEBUG nova.compute.manager [req-b163a3c0-3b99-48cd-97ad-b9f1ba9ddba9 req-4c58bb1f-799b-4304-a020-08228b33ba87 service nova] [instance: 5d288606-02b6-4d11-b285-7325e48fb5b3] Received event network-vif-plugged-383a9402-570b-4677-b3fd-f0854a5bb40f {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 941.800156] env[62627]: DEBUG oslo_concurrency.lockutils [req-b163a3c0-3b99-48cd-97ad-b9f1ba9ddba9 req-4c58bb1f-799b-4304-a020-08228b33ba87 service nova] Acquiring lock "5d288606-02b6-4d11-b285-7325e48fb5b3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 941.800505] env[62627]: DEBUG oslo_concurrency.lockutils [req-b163a3c0-3b99-48cd-97ad-b9f1ba9ddba9 req-4c58bb1f-799b-4304-a020-08228b33ba87 service nova] Lock "5d288606-02b6-4d11-b285-7325e48fb5b3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 941.800847] env[62627]: DEBUG oslo_concurrency.lockutils [req-b163a3c0-3b99-48cd-97ad-b9f1ba9ddba9 req-4c58bb1f-799b-4304-a020-08228b33ba87 service nova] Lock "5d288606-02b6-4d11-b285-7325e48fb5b3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 941.801129] env[62627]: DEBUG nova.compute.manager [req-b163a3c0-3b99-48cd-97ad-b9f1ba9ddba9 req-4c58bb1f-799b-4304-a020-08228b33ba87 service nova] [instance: 5d288606-02b6-4d11-b285-7325e48fb5b3] No waiting events found dispatching network-vif-plugged-383a9402-570b-4677-b3fd-f0854a5bb40f {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 941.801397] env[62627]: WARNING nova.compute.manager [req-b163a3c0-3b99-48cd-97ad-b9f1ba9ddba9 req-4c58bb1f-799b-4304-a020-08228b33ba87 service nova] [instance: 5d288606-02b6-4d11-b285-7325e48fb5b3] Received unexpected event network-vif-plugged-383a9402-570b-4677-b3fd-f0854a5bb40f for instance with vm_state building and task_state spawning. [ 941.817570] env[62627]: DEBUG oslo_vmware.api [None req-b726f19c-2204-488d-bcb0-979bb24ebb6f tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194588, 'name': ReconfigVM_Task, 'duration_secs': 0.384096} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.817570] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-b726f19c-2204-488d-bcb0-979bb24ebb6f tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 941.817570] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7584db4c-794f-4ad2-8c8d-190fb0219c2f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.829818] env[62627]: DEBUG oslo_vmware.api [None req-b726f19c-2204-488d-bcb0-979bb24ebb6f tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Waiting for the task: (returnval){ [ 941.829818] env[62627]: value = "task-2194589" [ 941.829818] env[62627]: _type = "Task" [ 941.829818] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.840059] env[62627]: DEBUG oslo_vmware.api [None req-b726f19c-2204-488d-bcb0-979bb24ebb6f tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194589, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.871910] env[62627]: DEBUG nova.compute.manager [None req-573c58fd-08ae-43be-961d-aafd92d16d8a tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] [instance: 4e65e39e-3b0b-4c71-87d7-e1ed663ada4a] Start destroying the instance on the hypervisor. 
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 941.876023] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-573c58fd-08ae-43be-961d-aafd92d16d8a tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] [instance: 4e65e39e-3b0b-4c71-87d7-e1ed663ada4a] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 941.876023] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58f67001-9ca6-4569-939f-c5d13af4fb90 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.881427] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-573c58fd-08ae-43be-961d-aafd92d16d8a tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] [instance: 4e65e39e-3b0b-4c71-87d7-e1ed663ada4a] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 941.881709] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d2785b23-abb7-42e8-843c-d2831f5d473e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.888456] env[62627]: DEBUG oslo_vmware.api [None req-573c58fd-08ae-43be-961d-aafd92d16d8a tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Waiting for the task: (returnval){ [ 941.888456] env[62627]: value = "task-2194590" [ 941.888456] env[62627]: _type = "Task" [ 941.888456] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.898822] env[62627]: DEBUG oslo_vmware.api [None req-573c58fd-08ae-43be-961d-aafd92d16d8a tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Task: {'id': task-2194590, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.914268] env[62627]: DEBUG nova.network.neutron [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: 5d288606-02b6-4d11-b285-7325e48fb5b3] Successfully updated port: 383a9402-570b-4677-b3fd-f0854a5bb40f {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 941.946060] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.686s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 941.946509] env[62627]: DEBUG nova.compute.manager [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] [instance: ae4ec050-5062-4e57-8090-f67f8c8013ff] Start building networks asynchronously for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 941.951660] env[62627]: DEBUG oslo_concurrency.lockutils [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.040s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 941.952022] env[62627]: INFO nova.compute.claims [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 941.960781] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquiring lock "e9d33611-e373-4303-937d-b43d893e51f5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 941.961237] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lock "e9d33611-e373-4303-937d-b43d893e51f5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 942.093577] env[62627]: DEBUG oslo_concurrency.lockutils [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Acquiring lock "refresh_cache-892fc84b-02a9-4fa0-81b5-80326a060c64" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 942.093763] env[62627]: DEBUG oslo_concurrency.lockutils [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Acquired lock "refresh_cache-892fc84b-02a9-4fa0-81b5-80326a060c64" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 942.093937] env[62627]: DEBUG nova.network.neutron [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 942.115897] env[62627]: DEBUG nova.network.neutron [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] [instance: c560b907-500b-42fd-851b-61a04472b660] Instance cache missing network info. 
{{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 942.332963] env[62627]: DEBUG nova.network.neutron [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] [instance: c560b907-500b-42fd-851b-61a04472b660] Updating instance_info_cache with network_info: [{"id": "1edb8f37-0f69-4fa7-9649-7a07dad18ee3", "address": "fa:16:3e:3a:b7:f0", "network": {"id": "bfead163-0daa-4112-af68-eff729463919", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1361857467-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d601ae06cf24ebca380f67af2425d85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0b29c52-62b0-4a9e-8e1c-41cf6ac8b916", "external-id": "nsx-vlan-transportzone-143", "segmentation_id": 143, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1edb8f37-0f", "ovs_interfaceid": "1edb8f37-0f69-4fa7-9649-7a07dad18ee3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 942.344776] env[62627]: DEBUG oslo_vmware.api [None req-b726f19c-2204-488d-bcb0-979bb24ebb6f tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194589, 'name': PowerOnVM_Task, 'duration_secs': 0.519038} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.345071] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-b726f19c-2204-488d-bcb0-979bb24ebb6f tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 942.348871] env[62627]: DEBUG nova.compute.manager [None req-b726f19c-2204-488d-bcb0-979bb24ebb6f tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 942.349697] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96683db1-4c36-4f04-ad24-f30c64b5d948 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.398610] env[62627]: DEBUG oslo_vmware.api [None req-573c58fd-08ae-43be-961d-aafd92d16d8a tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Task: {'id': task-2194590, 'name': PowerOffVM_Task, 'duration_secs': 0.22728} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.398911] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-573c58fd-08ae-43be-961d-aafd92d16d8a tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] [instance: 4e65e39e-3b0b-4c71-87d7-e1ed663ada4a] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 942.400542] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-573c58fd-08ae-43be-961d-aafd92d16d8a tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] [instance: 4e65e39e-3b0b-4c71-87d7-e1ed663ada4a] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 942.400542] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bb504371-bd60-45ee-b0af-02614674825c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.417069] env[62627]: DEBUG oslo_concurrency.lockutils [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Acquiring lock "refresh_cache-5d288606-02b6-4d11-b285-7325e48fb5b3" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 942.417234] env[62627]: DEBUG oslo_concurrency.lockutils [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Acquired lock "refresh_cache-5d288606-02b6-4d11-b285-7325e48fb5b3" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 942.417386] env[62627]: DEBUG nova.network.neutron [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: 5d288606-02b6-4d11-b285-7325e48fb5b3] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 942.463560] env[62627]: DEBUG nova.compute.utils [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 942.466254] env[62627]: DEBUG nova.compute.manager [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] [instance: ae4ec050-5062-4e57-8090-f67f8c8013ff] Allocating IP information in the background. 
{{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 942.466428] env[62627]: DEBUG nova.network.neutron [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] [instance: ae4ec050-5062-4e57-8090-f67f8c8013ff] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 942.469144] env[62627]: DEBUG nova.compute.manager [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 942.471422] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-573c58fd-08ae-43be-961d-aafd92d16d8a tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] [instance: 4e65e39e-3b0b-4c71-87d7-e1ed663ada4a] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 942.471477] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-573c58fd-08ae-43be-961d-aafd92d16d8a tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] [instance: 4e65e39e-3b0b-4c71-87d7-e1ed663ada4a] Deleting contents of the VM from datastore datastore1 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 942.471648] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-573c58fd-08ae-43be-961d-aafd92d16d8a tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Deleting the datastore file [datastore1] 4e65e39e-3b0b-4c71-87d7-e1ed663ada4a {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 942.472665] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-638adc91-67f6-4fb8-85f2-e6d66ba52167 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.481115] env[62627]: DEBUG oslo_vmware.api [None req-573c58fd-08ae-43be-961d-aafd92d16d8a tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Waiting for the task: (returnval){ [ 942.481115] env[62627]: value = "task-2194592" [ 942.481115] env[62627]: _type = "Task" [ 942.481115] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.491312] env[62627]: DEBUG oslo_vmware.api [None req-573c58fd-08ae-43be-961d-aafd92d16d8a tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Task: {'id': task-2194592, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.524832] env[62627]: DEBUG nova.policy [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bba7aa4bcd4440d981320fa5065fe182', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7865bca76c2e4ca5858347fed1ae38fc', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 942.837776] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Releasing lock "refresh_cache-c560b907-500b-42fd-851b-61a04472b660" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 942.838111] env[62627]: DEBUG nova.compute.manager [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] [instance: c560b907-500b-42fd-851b-61a04472b660] Instance network_info: |[{"id": "1edb8f37-0f69-4fa7-9649-7a07dad18ee3", "address": "fa:16:3e:3a:b7:f0", "network": {"id": "bfead163-0daa-4112-af68-eff729463919", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1361857467-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d601ae06cf24ebca380f67af2425d85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0b29c52-62b0-4a9e-8e1c-41cf6ac8b916", "external-id": "nsx-vlan-transportzone-143", "segmentation_id": 143, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1edb8f37-0f", "ovs_interfaceid": "1edb8f37-0f69-4fa7-9649-7a07dad18ee3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 942.839276] env[62627]: DEBUG nova.network.neutron [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Updating instance_info_cache with network_info: [{"id": "4a5f7c0b-f029-481c-a96f-0d9789d6bad5", "address": "fa:16:3e:6b:ef:0c", "network": {"id": "bd876f46-2bdf-4196-b718-c36f7e15f150", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.227", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": 
"192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ccb0cafa1a904d578ec4998b386bebf7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae1d6a8-cbba-4bbf-af10-ba5467340475", "external-id": "nsx-vlan-transportzone-271", "segmentation_id": 271, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4a5f7c0b-f0", "ovs_interfaceid": "4a5f7c0b-f029-481c-a96f-0d9789d6bad5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 942.840542] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] [instance: c560b907-500b-42fd-851b-61a04472b660] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3a:b7:f0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a0b29c52-62b0-4a9e-8e1c-41cf6ac8b916', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1edb8f37-0f69-4fa7-9649-7a07dad18ee3', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 942.848031] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Creating folder: Project (3d601ae06cf24ebca380f67af2425d85). Parent ref: group-v447541. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 942.848949] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3a17d937-5b23-4a59-b5ae-18c65a967127 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.854789] env[62627]: DEBUG nova.network.neutron [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] [instance: ae4ec050-5062-4e57-8090-f67f8c8013ff] Successfully created port: 39451f1f-3666-45ef-8833-8b223f0961e5 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 942.862598] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Created folder: Project (3d601ae06cf24ebca380f67af2425d85) in parent group-v447541. [ 942.862598] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Creating folder: Instances. Parent ref: group-v447698. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 942.863785] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-aa799cdd-70e8-4e45-9416-6c28f1769ab4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.873148] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Created folder: Instances in parent group-v447698. 
[ 942.873148] env[62627]: DEBUG oslo.service.loopingcall [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 942.873704] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c560b907-500b-42fd-851b-61a04472b660] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 942.873913] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7d6c3c6b-1721-4d4a-8ce7-5db51dd0ed18 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.896904] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 942.896904] env[62627]: value = "task-2194595" [ 942.896904] env[62627]: _type = "Task" [ 942.896904] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.907141] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194595, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.963614] env[62627]: DEBUG nova.network.neutron [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: 5d288606-02b6-4d11-b285-7325e48fb5b3] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 942.976651] env[62627]: DEBUG nova.compute.manager [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] [instance: ae4ec050-5062-4e57-8090-f67f8c8013ff] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 942.996277] env[62627]: DEBUG oslo_vmware.api [None req-573c58fd-08ae-43be-961d-aafd92d16d8a tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Task: {'id': task-2194592, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.507951} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.000476] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-573c58fd-08ae-43be-961d-aafd92d16d8a tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 943.000476] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-573c58fd-08ae-43be-961d-aafd92d16d8a tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] [instance: 4e65e39e-3b0b-4c71-87d7-e1ed663ada4a] Deleted contents of the VM from datastore datastore1 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 943.000476] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-573c58fd-08ae-43be-961d-aafd92d16d8a tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] [instance: 4e65e39e-3b0b-4c71-87d7-e1ed663ada4a] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 943.000476] env[62627]: INFO nova.compute.manager [None req-573c58fd-08ae-43be-961d-aafd92d16d8a tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] [instance: 4e65e39e-3b0b-4c71-87d7-e1ed663ada4a] Took 1.13 seconds to destroy the instance on the hypervisor. [ 943.000476] env[62627]: DEBUG oslo.service.loopingcall [None req-573c58fd-08ae-43be-961d-aafd92d16d8a tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 943.002857] env[62627]: DEBUG nova.compute.manager [-] [instance: 4e65e39e-3b0b-4c71-87d7-e1ed663ada4a] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 943.002989] env[62627]: DEBUG nova.network.neutron [-] [instance: 4e65e39e-3b0b-4c71-87d7-e1ed663ada4a] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 943.006240] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 943.256206] env[62627]: DEBUG nova.compute.manager [req-a8f86578-b5d3-498f-92a5-96d5e0682cd4 req-d2677070-3f6e-4d1e-ba91-23783b47215f service nova] [instance: c560b907-500b-42fd-851b-61a04472b660] Received event network-changed-1edb8f37-0f69-4fa7-9649-7a07dad18ee3 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 943.256206] env[62627]: DEBUG nova.compute.manager [req-a8f86578-b5d3-498f-92a5-96d5e0682cd4 req-d2677070-3f6e-4d1e-ba91-23783b47215f service nova] [instance: c560b907-500b-42fd-851b-61a04472b660] Refreshing instance network info cache due to event network-changed-1edb8f37-0f69-4fa7-9649-7a07dad18ee3. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 943.256206] env[62627]: DEBUG oslo_concurrency.lockutils [req-a8f86578-b5d3-498f-92a5-96d5e0682cd4 req-d2677070-3f6e-4d1e-ba91-23783b47215f service nova] Acquiring lock "refresh_cache-c560b907-500b-42fd-851b-61a04472b660" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 943.256206] env[62627]: DEBUG oslo_concurrency.lockutils [req-a8f86578-b5d3-498f-92a5-96d5e0682cd4 req-d2677070-3f6e-4d1e-ba91-23783b47215f service nova] Acquired lock "refresh_cache-c560b907-500b-42fd-851b-61a04472b660" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 943.256206] env[62627]: DEBUG nova.network.neutron [req-a8f86578-b5d3-498f-92a5-96d5e0682cd4 req-d2677070-3f6e-4d1e-ba91-23783b47215f service nova] [instance: c560b907-500b-42fd-851b-61a04472b660] Refreshing network info cache for port 1edb8f37-0f69-4fa7-9649-7a07dad18ee3 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 943.302034] env[62627]: DEBUG nova.network.neutron [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: 5d288606-02b6-4d11-b285-7325e48fb5b3] Updating instance_info_cache with network_info: [{"id": "383a9402-570b-4677-b3fd-f0854a5bb40f", "address": "fa:16:3e:55:71:7a", "network": {"id": "d8e684f9-7aa5-4513-a4df-afb52c5cb804", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-671089707-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d97a4a01696d4c4884df85a475c4bb7b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d88bb07-f93c-45ca-bce7-230cb1f33833", "external-id": "nsx-vlan-transportzone-387", "segmentation_id": 387, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap383a9402-57", "ovs_interfaceid": "383a9402-570b-4677-b3fd-f0854a5bb40f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 943.349764] env[62627]: DEBUG oslo_concurrency.lockutils [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Releasing lock "refresh_cache-892fc84b-02a9-4fa0-81b5-80326a060c64" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 943.407943] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194595, 'name': CreateVM_Task, 'duration_secs': 0.36666} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.416016] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c560b907-500b-42fd-851b-61a04472b660] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 943.416016] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 943.416016] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 943.416016] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 943.416016] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-974741a1-d24f-4c99-91eb-116ca2fed138 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.417988] env[62627]: DEBUG oslo_vmware.api [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Waiting for the task: (returnval){ [ 943.417988] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52664b9c-9414-4a9e-ca15-c89d100dcd27" [ 943.417988] env[62627]: _type = "Task" [ 943.417988] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.426665] env[62627]: DEBUG oslo_vmware.api [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52664b9c-9414-4a9e-ca15-c89d100dcd27, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.494177] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28405ca0-cbba-4628-97d8-c2fd0e9d1496 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.503061] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c37862c8-5370-43dd-b10e-c4c3087e7165 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.539260] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2c08236-2149-4503-b10f-17a6027cffa5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.550201] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afba9dc2-0dc5-4f1e-b65f-d01a7508eefe {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.568031] env[62627]: DEBUG nova.compute.provider_tree [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 943.611496] env[62627]: INFO nova.compute.manager [None req-435d5302-8f40-4cb0-b035-30d528076bbc tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Unrescuing [ 943.612387] env[62627]: DEBUG oslo_concurrency.lockutils [None req-435d5302-8f40-4cb0-b035-30d528076bbc tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Acquiring lock "refresh_cache-38ab16cc-be4f-47b4-a818-ea864e853872" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 943.612387] env[62627]: DEBUG oslo_concurrency.lockutils [None req-435d5302-8f40-4cb0-b035-30d528076bbc tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Acquired lock "refresh_cache-38ab16cc-be4f-47b4-a818-ea864e853872" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 943.612387] env[62627]: DEBUG nova.network.neutron [None req-435d5302-8f40-4cb0-b035-30d528076bbc tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 943.804825] env[62627]: DEBUG oslo_concurrency.lockutils [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Releasing lock "refresh_cache-5d288606-02b6-4d11-b285-7325e48fb5b3" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 943.804825] env[62627]: DEBUG nova.compute.manager [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: 
5d288606-02b6-4d11-b285-7325e48fb5b3] Instance network_info: |[{"id": "383a9402-570b-4677-b3fd-f0854a5bb40f", "address": "fa:16:3e:55:71:7a", "network": {"id": "d8e684f9-7aa5-4513-a4df-afb52c5cb804", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-671089707-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d97a4a01696d4c4884df85a475c4bb7b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d88bb07-f93c-45ca-bce7-230cb1f33833", "external-id": "nsx-vlan-transportzone-387", "segmentation_id": 387, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap383a9402-57", "ovs_interfaceid": "383a9402-570b-4677-b3fd-f0854a5bb40f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 943.810023] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: 5d288606-02b6-4d11-b285-7325e48fb5b3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:55:71:7a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2d88bb07-f93c-45ca-bce7-230cb1f33833', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '383a9402-570b-4677-b3fd-f0854a5bb40f', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 943.815262] env[62627]: DEBUG oslo.service.loopingcall [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 943.815511] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5d288606-02b6-4d11-b285-7325e48fb5b3] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 943.816715] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a1f2fd0c-7337-4973-98e2-b44125dee364 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.837128] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 943.837128] env[62627]: value = "task-2194596" [ 943.837128] env[62627]: _type = "Task" [ 943.837128] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.839754] env[62627]: DEBUG nova.compute.manager [req-9022bc82-f4e3-492d-b388-95e85a3c1c2f req-e005f642-c2ba-46f2-b2f5-bf860b4a7c82 service nova] [instance: 5d288606-02b6-4d11-b285-7325e48fb5b3] Received event network-changed-383a9402-570b-4677-b3fd-f0854a5bb40f {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 943.839933] env[62627]: DEBUG nova.compute.manager [req-9022bc82-f4e3-492d-b388-95e85a3c1c2f req-e005f642-c2ba-46f2-b2f5-bf860b4a7c82 service nova] [instance: 5d288606-02b6-4d11-b285-7325e48fb5b3] Refreshing instance network info cache due to event network-changed-383a9402-570b-4677-b3fd-f0854a5bb40f. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 943.840182] env[62627]: DEBUG oslo_concurrency.lockutils [req-9022bc82-f4e3-492d-b388-95e85a3c1c2f req-e005f642-c2ba-46f2-b2f5-bf860b4a7c82 service nova] Acquiring lock "refresh_cache-5d288606-02b6-4d11-b285-7325e48fb5b3" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 943.840295] env[62627]: DEBUG oslo_concurrency.lockutils [req-9022bc82-f4e3-492d-b388-95e85a3c1c2f req-e005f642-c2ba-46f2-b2f5-bf860b4a7c82 service nova] Acquired lock "refresh_cache-5d288606-02b6-4d11-b285-7325e48fb5b3" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 943.840473] env[62627]: DEBUG nova.network.neutron [req-9022bc82-f4e3-492d-b388-95e85a3c1c2f req-e005f642-c2ba-46f2-b2f5-bf860b4a7c82 service nova] [instance: 5d288606-02b6-4d11-b285-7325e48fb5b3] Refreshing network info cache for port 383a9402-570b-4677-b3fd-f0854a5bb40f {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 943.852609] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194596, 'name': CreateVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.874825] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e34b3946-4023-4239-b68c-142a74aa882d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.899949] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7de2bd47-b848-43b1-8b39-830873f69a7d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.908069] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Updating instance '892fc84b-02a9-4fa0-81b5-80326a060c64' progress to 83 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 943.928463] env[62627]: DEBUG oslo_vmware.api [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52664b9c-9414-4a9e-ca15-c89d100dcd27, 'name': SearchDatastore_Task, 'duration_secs': 0.010641} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.931374] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 943.931582] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] [instance: c560b907-500b-42fd-851b-61a04472b660] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 943.931852] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 943.932219] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 943.932219] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 943.932873] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5d4da3ee-a9f4-4a27-9121-69032e98be7f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.941776] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 943.941970] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 943.942733] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a318b24f-08cc-4e9a-822d-70fe2495d745 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.948334] env[62627]: DEBUG oslo_vmware.api [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Waiting for the task: (returnval){ [ 943.948334] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52cba745-1be4-6587-18d7-2bd16d19d23f" [ 943.948334] env[62627]: _type = "Task" [ 943.948334] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.957245] env[62627]: DEBUG oslo_vmware.api [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52cba745-1be4-6587-18d7-2bd16d19d23f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.983916] env[62627]: DEBUG nova.network.neutron [-] [instance: 4e65e39e-3b0b-4c71-87d7-e1ed663ada4a] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 943.998951] env[62627]: DEBUG nova.compute.manager [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] [instance: ae4ec050-5062-4e57-8090-f67f8c8013ff] Start spawning the instance on the hypervisor. 
{{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 944.027824] env[62627]: DEBUG nova.virt.hardware [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 944.028115] env[62627]: DEBUG nova.virt.hardware [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 944.028283] env[62627]: DEBUG nova.virt.hardware [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 944.028465] env[62627]: DEBUG nova.virt.hardware [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 944.028608] env[62627]: DEBUG nova.virt.hardware [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 944.028757] env[62627]: DEBUG nova.virt.hardware [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 944.028965] env[62627]: DEBUG nova.virt.hardware [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 944.029144] env[62627]: DEBUG nova.virt.hardware [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 944.029314] env[62627]: DEBUG nova.virt.hardware [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 944.029478] env[62627]: DEBUG nova.virt.hardware [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 944.029729] env[62627]: DEBUG nova.virt.hardware [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 944.030520] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d708ed58-b708-4413-8348-b72591f4404e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.038689] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e80b35a8-5a52-4da1-9d49-75df759a4eb0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.053164] env[62627]: DEBUG nova.network.neutron [req-a8f86578-b5d3-498f-92a5-96d5e0682cd4 req-d2677070-3f6e-4d1e-ba91-23783b47215f service nova] [instance: c560b907-500b-42fd-851b-61a04472b660] Updated VIF entry in instance network info cache for port 1edb8f37-0f69-4fa7-9649-7a07dad18ee3. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 944.053500] env[62627]: DEBUG nova.network.neutron [req-a8f86578-b5d3-498f-92a5-96d5e0682cd4 req-d2677070-3f6e-4d1e-ba91-23783b47215f service nova] [instance: c560b907-500b-42fd-851b-61a04472b660] Updating instance_info_cache with network_info: [{"id": "1edb8f37-0f69-4fa7-9649-7a07dad18ee3", "address": "fa:16:3e:3a:b7:f0", "network": {"id": "bfead163-0daa-4112-af68-eff729463919", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1361857467-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d601ae06cf24ebca380f67af2425d85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0b29c52-62b0-4a9e-8e1c-41cf6ac8b916", "external-id": "nsx-vlan-transportzone-143", "segmentation_id": 143, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1edb8f37-0f", "ovs_interfaceid": "1edb8f37-0f69-4fa7-9649-7a07dad18ee3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 944.070866] env[62627]: DEBUG nova.scheduler.client.report [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 944.350874] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194596, 'name': CreateVM_Task, 'duration_secs': 0.375759} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.351895] env[62627]: DEBUG nova.network.neutron [None req-435d5302-8f40-4cb0-b035-30d528076bbc tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Updating instance_info_cache with network_info: [{"id": "e7de9fde-1309-4a6a-bbb5-537333d9e42c", "address": "fa:16:3e:e9:d1:53", "network": {"id": "6868e7f7-6e86-4ef3-a3f2-82b652203315", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-729196523-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "6ad7b328aba44b51aec9455bada6678c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape7de9fde-13", "ovs_interfaceid": "e7de9fde-1309-4a6a-bbb5-537333d9e42c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 944.353115] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5d288606-02b6-4d11-b285-7325e48fb5b3] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 944.357231] env[62627]: DEBUG oslo_concurrency.lockutils [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 944.357231] env[62627]: DEBUG oslo_concurrency.lockutils [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 944.357231] env[62627]: DEBUG oslo_concurrency.lockutils [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 944.357231] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fa7569be-1638-453f-9ae8-379616284966 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.359867] env[62627]: DEBUG oslo_vmware.api [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 
tempest-VolumesAdminNegativeTest-1197893955-project-member] Waiting for the task: (returnval){ [ 944.359867] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]529ee50f-a1b0-d936-dfee-59ed1756529c" [ 944.359867] env[62627]: _type = "Task" [ 944.359867] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.374061] env[62627]: DEBUG oslo_vmware.api [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]529ee50f-a1b0-d936-dfee-59ed1756529c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.415202] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 944.415544] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c051b34b-5d32-4513-918c-5a7263c56f6a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.424172] env[62627]: DEBUG oslo_vmware.api [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Waiting for the task: (returnval){ [ 944.424172] env[62627]: value = "task-2194597" [ 944.424172] env[62627]: _type = "Task" [ 944.424172] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.436078] env[62627]: DEBUG oslo_vmware.api [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2194597, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.464811] env[62627]: DEBUG oslo_vmware.api [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52cba745-1be4-6587-18d7-2bd16d19d23f, 'name': SearchDatastore_Task, 'duration_secs': 0.014718} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.465827] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d15efdc7-73af-4ba5-b1e7-907e7309ff2c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.473155] env[62627]: DEBUG oslo_vmware.api [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Waiting for the task: (returnval){ [ 944.473155] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52059207-b3b3-6ecd-f4f2-08fd5c8c644c" [ 944.473155] env[62627]: _type = "Task" [ 944.473155] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.486014] env[62627]: DEBUG oslo_vmware.api [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52059207-b3b3-6ecd-f4f2-08fd5c8c644c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.486643] env[62627]: INFO nova.compute.manager [-] [instance: 4e65e39e-3b0b-4c71-87d7-e1ed663ada4a] Took 1.48 seconds to deallocate network for instance. [ 944.556765] env[62627]: DEBUG oslo_concurrency.lockutils [req-a8f86578-b5d3-498f-92a5-96d5e0682cd4 req-d2677070-3f6e-4d1e-ba91-23783b47215f service nova] Releasing lock "refresh_cache-c560b907-500b-42fd-851b-61a04472b660" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 944.576083] env[62627]: DEBUG oslo_concurrency.lockutils [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.626s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.576647] env[62627]: DEBUG nova.compute.manager [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 944.579599] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a714923c-39ce-44fd-beff-e8a2003abba2 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.465s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 944.579599] env[62627]: DEBUG nova.objects.instance [None req-a714923c-39ce-44fd-beff-e8a2003abba2 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Lazy-loading 'resources' on Instance uuid 574293e3-8ec8-410f-8a30-83bc60d25d64 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 944.645886] env[62627]: DEBUG nova.network.neutron [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] [instance: ae4ec050-5062-4e57-8090-f67f8c8013ff] Successfully updated port: 39451f1f-3666-45ef-8833-8b223f0961e5 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 944.660815] env[62627]: DEBUG nova.network.neutron [req-9022bc82-f4e3-492d-b388-95e85a3c1c2f req-e005f642-c2ba-46f2-b2f5-bf860b4a7c82 service nova] [instance: 5d288606-02b6-4d11-b285-7325e48fb5b3] Updated VIF entry in instance network info cache for port 383a9402-570b-4677-b3fd-f0854a5bb40f. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 944.661463] env[62627]: DEBUG nova.network.neutron [req-9022bc82-f4e3-492d-b388-95e85a3c1c2f req-e005f642-c2ba-46f2-b2f5-bf860b4a7c82 service nova] [instance: 5d288606-02b6-4d11-b285-7325e48fb5b3] Updating instance_info_cache with network_info: [{"id": "383a9402-570b-4677-b3fd-f0854a5bb40f", "address": "fa:16:3e:55:71:7a", "network": {"id": "d8e684f9-7aa5-4513-a4df-afb52c5cb804", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-671089707-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d97a4a01696d4c4884df85a475c4bb7b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d88bb07-f93c-45ca-bce7-230cb1f33833", "external-id": "nsx-vlan-transportzone-387", "segmentation_id": 387, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap383a9402-57", "ovs_interfaceid": "383a9402-570b-4677-b3fd-f0854a5bb40f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 944.856568] env[62627]: DEBUG oslo_concurrency.lockutils [None req-435d5302-8f40-4cb0-b035-30d528076bbc tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Releasing lock "refresh_cache-38ab16cc-be4f-47b4-a818-ea864e853872" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 944.857334] env[62627]: DEBUG nova.objects.instance [None req-435d5302-8f40-4cb0-b035-30d528076bbc tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Lazy-loading 'flavor' on Instance uuid 38ab16cc-be4f-47b4-a818-ea864e853872 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 944.871761] env[62627]: DEBUG oslo_vmware.api [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]529ee50f-a1b0-d936-dfee-59ed1756529c, 'name': SearchDatastore_Task, 'duration_secs': 0.010025} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.872449] env[62627]: DEBUG oslo_concurrency.lockutils [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 944.872709] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: 5d288606-02b6-4d11-b285-7325e48fb5b3] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 944.872921] env[62627]: DEBUG oslo_concurrency.lockutils [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 944.937091] env[62627]: DEBUG oslo_vmware.api [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2194597, 'name': PowerOnVM_Task, 'duration_secs': 0.436766} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.937948] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 944.937948] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-13af27a7-b232-499b-a1d6-f7276bc7fe91 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Updating instance '892fc84b-02a9-4fa0-81b5-80326a060c64' progress to 100 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 944.985029] env[62627]: DEBUG oslo_vmware.api [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52059207-b3b3-6ecd-f4f2-08fd5c8c644c, 'name': SearchDatastore_Task, 'duration_secs': 0.010748} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.985029] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 944.985249] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore1] c560b907-500b-42fd-851b-61a04472b660/c560b907-500b-42fd-851b-61a04472b660.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 944.985495] env[62627]: DEBUG oslo_concurrency.lockutils [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 944.985693] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 944.985911] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e4b0d978-cf3d-437e-8616-03fbb7caab35 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.988528] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-abae4b0a-256f-49f2-87cd-6fa030fed410 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.994807] env[62627]: DEBUG oslo_concurrency.lockutils [None req-573c58fd-08ae-43be-961d-aafd92d16d8a tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 944.996574] env[62627]: DEBUG oslo_vmware.api [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Waiting for the task: (returnval){ [ 944.996574] env[62627]: value = "task-2194598" [ 944.996574] env[62627]: _type = "Task" [ 944.996574] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.001340] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 945.001520] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 945.002571] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e786aba-bed6-4421-920f-e5621f9a908c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.008205] env[62627]: DEBUG oslo_vmware.api [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Task: {'id': task-2194598, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.011738] env[62627]: DEBUG oslo_vmware.api [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Waiting for the task: (returnval){ [ 945.011738] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52046ee7-9a90-cab1-79bc-b4a4170ac59f" [ 945.011738] env[62627]: _type = "Task" [ 945.011738] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.020566] env[62627]: DEBUG oslo_vmware.api [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52046ee7-9a90-cab1-79bc-b4a4170ac59f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.083503] env[62627]: DEBUG nova.compute.utils [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 945.085234] env[62627]: DEBUG nova.compute.manager [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Allocating IP information in the background. 
{{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 945.085420] env[62627]: DEBUG nova.network.neutron [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 945.140530] env[62627]: DEBUG nova.policy [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bc7ce5392ecd486b962c3db9d43181e1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a9f3a36c557145e38c528a628b1f3b67', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 945.148297] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Acquiring lock "refresh_cache-ae4ec050-5062-4e57-8090-f67f8c8013ff" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 945.148468] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Acquired lock "refresh_cache-ae4ec050-5062-4e57-8090-f67f8c8013ff" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 945.148570] env[62627]: DEBUG nova.network.neutron [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] [instance: ae4ec050-5062-4e57-8090-f67f8c8013ff] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 945.164664] env[62627]: DEBUG oslo_concurrency.lockutils [req-9022bc82-f4e3-492d-b388-95e85a3c1c2f req-e005f642-c2ba-46f2-b2f5-bf860b4a7c82 service nova] Releasing lock "refresh_cache-5d288606-02b6-4d11-b285-7325e48fb5b3" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 945.165062] env[62627]: DEBUG nova.compute.manager [req-9022bc82-f4e3-492d-b388-95e85a3c1c2f req-e005f642-c2ba-46f2-b2f5-bf860b4a7c82 service nova] [instance: 4e65e39e-3b0b-4c71-87d7-e1ed663ada4a] Received event network-vif-deleted-7e5db0e3-f97d-48f9-89bd-9d1b24ac6d14 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 945.165331] env[62627]: INFO nova.compute.manager [req-9022bc82-f4e3-492d-b388-95e85a3c1c2f req-e005f642-c2ba-46f2-b2f5-bf860b4a7c82 service nova] [instance: 4e65e39e-3b0b-4c71-87d7-e1ed663ada4a] Neutron deleted interface 7e5db0e3-f97d-48f9-89bd-9d1b24ac6d14; detaching it from the instance and deleting it from the info cache [ 945.165685] env[62627]: DEBUG nova.network.neutron [req-9022bc82-f4e3-492d-b388-95e85a3c1c2f req-e005f642-c2ba-46f2-b2f5-bf860b4a7c82 service nova] [instance: 
4e65e39e-3b0b-4c71-87d7-e1ed663ada4a] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 945.285362] env[62627]: DEBUG nova.compute.manager [req-92c0eb01-d173-411b-80dd-eb853a02d3b5 req-a40c8542-c13b-4745-a8b4-c615a22df918 service nova] [instance: ae4ec050-5062-4e57-8090-f67f8c8013ff] Received event network-vif-plugged-39451f1f-3666-45ef-8833-8b223f0961e5 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 945.285415] env[62627]: DEBUG oslo_concurrency.lockutils [req-92c0eb01-d173-411b-80dd-eb853a02d3b5 req-a40c8542-c13b-4745-a8b4-c615a22df918 service nova] Acquiring lock "ae4ec050-5062-4e57-8090-f67f8c8013ff-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 945.285605] env[62627]: DEBUG oslo_concurrency.lockutils [req-92c0eb01-d173-411b-80dd-eb853a02d3b5 req-a40c8542-c13b-4745-a8b4-c615a22df918 service nova] Lock "ae4ec050-5062-4e57-8090-f67f8c8013ff-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 945.285798] env[62627]: DEBUG oslo_concurrency.lockutils [req-92c0eb01-d173-411b-80dd-eb853a02d3b5 req-a40c8542-c13b-4745-a8b4-c615a22df918 service nova] Lock "ae4ec050-5062-4e57-8090-f67f8c8013ff-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 945.285962] env[62627]: DEBUG nova.compute.manager [req-92c0eb01-d173-411b-80dd-eb853a02d3b5 req-a40c8542-c13b-4745-a8b4-c615a22df918 service nova] [instance: ae4ec050-5062-4e57-8090-f67f8c8013ff] No waiting events found dispatching network-vif-plugged-39451f1f-3666-45ef-8833-8b223f0961e5 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 945.286163] env[62627]: WARNING nova.compute.manager [req-92c0eb01-d173-411b-80dd-eb853a02d3b5 req-a40c8542-c13b-4745-a8b4-c615a22df918 service nova] [instance: ae4ec050-5062-4e57-8090-f67f8c8013ff] Received unexpected event network-vif-plugged-39451f1f-3666-45ef-8833-8b223f0961e5 for instance with vm_state building and task_state spawning. [ 945.286385] env[62627]: DEBUG nova.compute.manager [req-92c0eb01-d173-411b-80dd-eb853a02d3b5 req-a40c8542-c13b-4745-a8b4-c615a22df918 service nova] [instance: ae4ec050-5062-4e57-8090-f67f8c8013ff] Received event network-changed-39451f1f-3666-45ef-8833-8b223f0961e5 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 945.286552] env[62627]: DEBUG nova.compute.manager [req-92c0eb01-d173-411b-80dd-eb853a02d3b5 req-a40c8542-c13b-4745-a8b4-c615a22df918 service nova] [instance: ae4ec050-5062-4e57-8090-f67f8c8013ff] Refreshing instance network info cache due to event network-changed-39451f1f-3666-45ef-8833-8b223f0961e5. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 945.286856] env[62627]: DEBUG oslo_concurrency.lockutils [req-92c0eb01-d173-411b-80dd-eb853a02d3b5 req-a40c8542-c13b-4745-a8b4-c615a22df918 service nova] Acquiring lock "refresh_cache-ae4ec050-5062-4e57-8090-f67f8c8013ff" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 945.366722] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa6451d9-6217-4c2f-9657-773ff94fe8c5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.388434] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-435d5302-8f40-4cb0-b035-30d528076bbc tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 945.393646] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-eaa7fee6-b8be-4d69-8736-3d9de51cf7ee {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.398546] env[62627]: DEBUG oslo_vmware.api [None req-435d5302-8f40-4cb0-b035-30d528076bbc tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Waiting for the task: (returnval){ [ 945.398546] env[62627]: value = "task-2194599" [ 945.398546] env[62627]: _type = "Task" [ 945.398546] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.408047] env[62627]: DEBUG oslo_vmware.api [None req-435d5302-8f40-4cb0-b035-30d528076bbc tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194599, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.502461] env[62627]: DEBUG nova.network.neutron [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Successfully created port: 332cfd7e-9934-429a-bc74-275d48f07fdb {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 945.512496] env[62627]: DEBUG oslo_vmware.api [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Task: {'id': task-2194598, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.521497] env[62627]: DEBUG oslo_vmware.api [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52046ee7-9a90-cab1-79bc-b4a4170ac59f, 'name': SearchDatastore_Task, 'duration_secs': 0.009951} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.525112] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4103a691-1127-4a73-b69f-96f126dcfb07 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.530697] env[62627]: DEBUG oslo_vmware.api [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Waiting for the task: (returnval){ [ 945.530697] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52724a73-42a6-c378-b237-aa0fd4f57463" [ 945.530697] env[62627]: _type = "Task" [ 945.530697] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.539997] env[62627]: DEBUG oslo_vmware.api [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52724a73-42a6-c378-b237-aa0fd4f57463, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.555351] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-420e6da1-a0dc-4bf7-8d77-bc4f87550c17 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.563607] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a1ae2b9-dcfa-46f8-93ee-6f03d006ffaf {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.599248] env[62627]: DEBUG nova.compute.manager [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Start building block device mappings for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 945.603854] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00f0247e-72f8-4526-9a0d-e3f23df86824 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.611968] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-318b6422-5e82-492b-8a24-e702091046e9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.629039] env[62627]: DEBUG nova.compute.provider_tree [None req-a714923c-39ce-44fd-beff-e8a2003abba2 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 945.668647] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0fe459e0-b527-42a5-a28a-f09b11fbe9cc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.678615] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-245bd6e1-6e31-4047-b424-c16b12d2cc57 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.715893] env[62627]: DEBUG nova.compute.manager [req-9022bc82-f4e3-492d-b388-95e85a3c1c2f req-e005f642-c2ba-46f2-b2f5-bf860b4a7c82 service nova] [instance: 4e65e39e-3b0b-4c71-87d7-e1ed663ada4a] Detach interface failed, port_id=7e5db0e3-f97d-48f9-89bd-9d1b24ac6d14, reason: Instance 4e65e39e-3b0b-4c71-87d7-e1ed663ada4a could not be found. {{(pid=62627) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 945.717217] env[62627]: DEBUG nova.network.neutron [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] [instance: ae4ec050-5062-4e57-8090-f67f8c8013ff] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 945.908600] env[62627]: DEBUG oslo_vmware.api [None req-435d5302-8f40-4cb0-b035-30d528076bbc tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194599, 'name': PowerOffVM_Task, 'duration_secs': 0.31737} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.908883] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-435d5302-8f40-4cb0-b035-30d528076bbc tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 945.915310] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-435d5302-8f40-4cb0-b035-30d528076bbc tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Reconfiguring VM instance instance-00000035 to detach disk 2001 {{(pid=62627) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 945.915310] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3f796514-3c3b-483d-ae1a-82d46f858668 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.932761] env[62627]: DEBUG oslo_vmware.api [None req-435d5302-8f40-4cb0-b035-30d528076bbc tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Waiting for the task: (returnval){ [ 945.932761] env[62627]: value = "task-2194600" [ 945.932761] env[62627]: _type = "Task" [ 945.932761] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.938909] env[62627]: DEBUG nova.network.neutron [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] [instance: ae4ec050-5062-4e57-8090-f67f8c8013ff] Updating instance_info_cache with network_info: [{"id": "39451f1f-3666-45ef-8833-8b223f0961e5", "address": "fa:16:3e:98:66:ae", "network": {"id": "378f2580-0f19-413d-962d-b0d599d36125", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-493771480-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7865bca76c2e4ca5858347fed1ae38fc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d2e4070-a78e-4d08-a104-b6312ab65577", "external-id": "nsx-vlan-transportzone-292", "segmentation_id": 292, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39451f1f-36", "ovs_interfaceid": "39451f1f-3666-45ef-8833-8b223f0961e5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 945.943856] env[62627]: DEBUG oslo_vmware.api [None req-435d5302-8f40-4cb0-b035-30d528076bbc tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194600, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.007468] env[62627]: DEBUG oslo_vmware.api [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Task: {'id': task-2194598, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.866341} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.008440] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore1] c560b907-500b-42fd-851b-61a04472b660/c560b907-500b-42fd-851b-61a04472b660.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 946.008659] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] [instance: c560b907-500b-42fd-851b-61a04472b660] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 946.008934] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0299f22b-3cc6-4a78-a23e-80d5e6f1bd42 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.018475] env[62627]: DEBUG oslo_vmware.api [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Waiting for the task: (returnval){ [ 946.018475] env[62627]: value = "task-2194601" [ 946.018475] env[62627]: _type = "Task" [ 946.018475] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.025895] env[62627]: DEBUG oslo_vmware.api [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Task: {'id': task-2194601, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.040134] env[62627]: DEBUG oslo_vmware.api [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52724a73-42a6-c378-b237-aa0fd4f57463, 'name': SearchDatastore_Task, 'duration_secs': 0.063873} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.040134] env[62627]: DEBUG oslo_concurrency.lockutils [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 946.040342] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore1] 5d288606-02b6-4d11-b285-7325e48fb5b3/5d288606-02b6-4d11-b285-7325e48fb5b3.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 946.040593] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-91f312c1-9412-4a61-934a-7550cdcd06de {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.046826] env[62627]: DEBUG oslo_vmware.api [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Waiting for the task: (returnval){ [ 946.046826] env[62627]: value = "task-2194602" [ 946.046826] env[62627]: _type = "Task" [ 946.046826] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.055107] env[62627]: DEBUG oslo_vmware.api [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Task: {'id': task-2194602, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.132467] env[62627]: DEBUG nova.scheduler.client.report [None req-a714923c-39ce-44fd-beff-e8a2003abba2 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 946.442550] env[62627]: DEBUG oslo_vmware.api [None req-435d5302-8f40-4cb0-b035-30d528076bbc tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194600, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.446408] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Releasing lock "refresh_cache-ae4ec050-5062-4e57-8090-f67f8c8013ff" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 946.446408] env[62627]: DEBUG nova.compute.manager [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] [instance: ae4ec050-5062-4e57-8090-f67f8c8013ff] Instance network_info: |[{"id": "39451f1f-3666-45ef-8833-8b223f0961e5", "address": "fa:16:3e:98:66:ae", "network": {"id": "378f2580-0f19-413d-962d-b0d599d36125", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-493771480-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7865bca76c2e4ca5858347fed1ae38fc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d2e4070-a78e-4d08-a104-b6312ab65577", "external-id": "nsx-vlan-transportzone-292", "segmentation_id": 292, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39451f1f-36", "ovs_interfaceid": "39451f1f-3666-45ef-8833-8b223f0961e5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 946.446408] env[62627]: DEBUG oslo_concurrency.lockutils [req-92c0eb01-d173-411b-80dd-eb853a02d3b5 req-a40c8542-c13b-4745-a8b4-c615a22df918 service nova] Acquired lock "refresh_cache-ae4ec050-5062-4e57-8090-f67f8c8013ff" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 946.446408] env[62627]: DEBUG nova.network.neutron [req-92c0eb01-d173-411b-80dd-eb853a02d3b5 req-a40c8542-c13b-4745-a8b4-c615a22df918 service nova] [instance: ae4ec050-5062-4e57-8090-f67f8c8013ff] Refreshing network info cache for port 39451f1f-3666-45ef-8833-8b223f0961e5 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 946.449028] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] [instance: ae4ec050-5062-4e57-8090-f67f8c8013ff] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:98:66:ae', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3d2e4070-a78e-4d08-a104-b6312ab65577', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '39451f1f-3666-45ef-8833-8b223f0961e5', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 946.455466] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 
tempest-ServersV294TestFqdnHostnames-171705297-project-member] Creating folder: Project (7865bca76c2e4ca5858347fed1ae38fc). Parent ref: group-v447541. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 946.464408] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f839c8de-a6b1-4430-8ccf-44a30938bf99 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.475068] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Created folder: Project (7865bca76c2e4ca5858347fed1ae38fc) in parent group-v447541. [ 946.475273] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Creating folder: Instances. Parent ref: group-v447702. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 946.475516] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9dc84dbc-bebb-471e-848b-6f9a131ba84c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.485438] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Created folder: Instances in parent group-v447702. [ 946.485678] env[62627]: DEBUG oslo.service.loopingcall [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 946.485890] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ae4ec050-5062-4e57-8090-f67f8c8013ff] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 946.486105] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-106ffd3a-86da-457b-9c7a-0b43cf687f08 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.503816] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 946.503816] env[62627]: value = "task-2194605" [ 946.503816] env[62627]: _type = "Task" [ 946.503816] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.511972] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194605, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.526909] env[62627]: DEBUG oslo_vmware.api [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Task: {'id': task-2194601, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067594} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.527159] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] [instance: c560b907-500b-42fd-851b-61a04472b660] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 946.527885] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52a942ab-6728-4f03-8e1d-063801f1d404 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.550237] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] [instance: c560b907-500b-42fd-851b-61a04472b660] Reconfiguring VM instance instance-00000038 to attach disk [datastore1] c560b907-500b-42fd-851b-61a04472b660/c560b907-500b-42fd-851b-61a04472b660.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 946.550769] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3b06fcf1-433c-4d97-a42a-1bcaec0967f8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.573412] env[62627]: DEBUG oslo_vmware.api [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Task: {'id': task-2194602, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.574701] env[62627]: DEBUG oslo_vmware.api [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Waiting for the task: (returnval){ [ 946.574701] env[62627]: value = "task-2194606" [ 946.574701] env[62627]: _type = "Task" [ 946.574701] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.581991] env[62627]: DEBUG oslo_vmware.api [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Task: {'id': task-2194606, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.608878] env[62627]: DEBUG nova.compute.manager [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Start spawning the instance on the hypervisor. 
{{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 946.637079] env[62627]: DEBUG nova.virt.hardware [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 946.637470] env[62627]: DEBUG nova.virt.hardware [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 946.637681] env[62627]: DEBUG nova.virt.hardware [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 946.637962] env[62627]: DEBUG nova.virt.hardware [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 946.638056] env[62627]: DEBUG nova.virt.hardware [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 946.638213] env[62627]: DEBUG nova.virt.hardware [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 946.638516] env[62627]: DEBUG nova.virt.hardware [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 946.638792] env[62627]: DEBUG nova.virt.hardware [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 946.639056] env[62627]: DEBUG nova.virt.hardware [None 
req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 946.639266] env[62627]: DEBUG nova.virt.hardware [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 946.639476] env[62627]: DEBUG nova.virt.hardware [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 946.640436] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a714923c-39ce-44fd-beff-e8a2003abba2 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.061s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 946.643796] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef8f6134-28ea-4597-bfc4-9a0b18f69705 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.647476] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.825s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 946.649132] env[62627]: INFO nova.compute.claims [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] [instance: 8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 946.660372] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0feb2dd8-a43a-488c-a771-aa9c984a151d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.678329] env[62627]: INFO nova.scheduler.client.report [None req-a714923c-39ce-44fd-beff-e8a2003abba2 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Deleted allocations for instance 574293e3-8ec8-410f-8a30-83bc60d25d64 [ 946.944426] env[62627]: DEBUG oslo_vmware.api [None req-435d5302-8f40-4cb0-b035-30d528076bbc tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194600, 'name': ReconfigVM_Task, 'duration_secs': 0.764528} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.944890] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-435d5302-8f40-4cb0-b035-30d528076bbc tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Reconfigured VM instance instance-00000035 to detach disk 2001 {{(pid=62627) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 946.945105] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-435d5302-8f40-4cb0-b035-30d528076bbc tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 946.945608] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0bb224a1-4c57-45a0-bfea-74f6637a324c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.952982] env[62627]: DEBUG oslo_vmware.api [None req-435d5302-8f40-4cb0-b035-30d528076bbc tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Waiting for the task: (returnval){ [ 946.952982] env[62627]: value = "task-2194607" [ 946.952982] env[62627]: _type = "Task" [ 946.952982] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.962311] env[62627]: DEBUG oslo_vmware.api [None req-435d5302-8f40-4cb0-b035-30d528076bbc tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194607, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.016243] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194605, 'name': CreateVM_Task, 'duration_secs': 0.349284} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.016243] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ae4ec050-5062-4e57-8090-f67f8c8013ff] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 947.016890] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 947.017086] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 947.017421] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 947.017685] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d2c27fd-8894-48eb-b26a-17b6c9ba1836 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.024057] env[62627]: DEBUG oslo_vmware.api [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Waiting for the task: (returnval){ [ 947.024057] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52100607-d607-6320-4a96-8d25756ec00c" [ 947.024057] env[62627]: _type = "Task" [ 947.024057] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.032298] env[62627]: DEBUG oslo_vmware.api [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52100607-d607-6320-4a96-8d25756ec00c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.060396] env[62627]: DEBUG oslo_vmware.api [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Task: {'id': task-2194602, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.942378} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.060705] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore1] 5d288606-02b6-4d11-b285-7325e48fb5b3/5d288606-02b6-4d11-b285-7325e48fb5b3.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 947.060932] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: 5d288606-02b6-4d11-b285-7325e48fb5b3] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 947.061257] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9b9a0314-5459-4696-8a59-84eb255deed8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.067766] env[62627]: DEBUG oslo_vmware.api [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Waiting for the task: (returnval){ [ 947.067766] env[62627]: value = "task-2194608" [ 947.067766] env[62627]: _type = "Task" [ 947.067766] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.078610] env[62627]: DEBUG oslo_vmware.api [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Task: {'id': task-2194608, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.087159] env[62627]: DEBUG oslo_vmware.api [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Task: {'id': task-2194606, 'name': ReconfigVM_Task, 'duration_secs': 0.464646} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.087484] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] [instance: c560b907-500b-42fd-851b-61a04472b660] Reconfigured VM instance instance-00000038 to attach disk [datastore1] c560b907-500b-42fd-851b-61a04472b660/c560b907-500b-42fd-851b-61a04472b660.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 947.088196] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-555e15c2-a567-44ac-aa0b-f014be8eca8f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.094180] env[62627]: DEBUG oslo_vmware.api [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Waiting for the task: (returnval){ [ 947.094180] env[62627]: value = "task-2194609" [ 947.094180] env[62627]: _type = "Task" [ 947.094180] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.104938] env[62627]: DEBUG oslo_vmware.api [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Task: {'id': task-2194609, 'name': Rename_Task} progress is 6%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.192211] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a714923c-39ce-44fd-beff-e8a2003abba2 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Lock "574293e3-8ec8-410f-8a30-83bc60d25d64" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.798s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 947.220148] env[62627]: DEBUG nova.compute.manager [req-7cd22dcd-6cb3-4216-9b45-9117a93fb616 req-62eacb28-1aa1-4314-a45a-c0e65657429b service nova] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Received event network-vif-plugged-332cfd7e-9934-429a-bc74-275d48f07fdb {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 947.220148] env[62627]: DEBUG oslo_concurrency.lockutils [req-7cd22dcd-6cb3-4216-9b45-9117a93fb616 req-62eacb28-1aa1-4314-a45a-c0e65657429b service nova] Acquiring lock "d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 947.220271] env[62627]: DEBUG oslo_concurrency.lockutils [req-7cd22dcd-6cb3-4216-9b45-9117a93fb616 req-62eacb28-1aa1-4314-a45a-c0e65657429b service nova] Lock "d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 947.220475] env[62627]: DEBUG oslo_concurrency.lockutils [req-7cd22dcd-6cb3-4216-9b45-9117a93fb616 req-62eacb28-1aa1-4314-a45a-c0e65657429b service nova] Lock "d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 947.220677] env[62627]: DEBUG nova.compute.manager [req-7cd22dcd-6cb3-4216-9b45-9117a93fb616 req-62eacb28-1aa1-4314-a45a-c0e65657429b service nova] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] No waiting events found dispatching network-vif-plugged-332cfd7e-9934-429a-bc74-275d48f07fdb {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 947.220895] env[62627]: WARNING nova.compute.manager [req-7cd22dcd-6cb3-4216-9b45-9117a93fb616 req-62eacb28-1aa1-4314-a45a-c0e65657429b service nova] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Received unexpected event network-vif-plugged-332cfd7e-9934-429a-bc74-275d48f07fdb for instance with vm_state building and task_state spawning. [ 947.249661] env[62627]: DEBUG nova.network.neutron [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Successfully updated port: 332cfd7e-9934-429a-bc74-275d48f07fdb {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 947.280827] env[62627]: DEBUG nova.network.neutron [req-92c0eb01-d173-411b-80dd-eb853a02d3b5 req-a40c8542-c13b-4745-a8b4-c615a22df918 service nova] [instance: ae4ec050-5062-4e57-8090-f67f8c8013ff] Updated VIF entry in instance network info cache for port 39451f1f-3666-45ef-8833-8b223f0961e5. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 947.280827] env[62627]: DEBUG nova.network.neutron [req-92c0eb01-d173-411b-80dd-eb853a02d3b5 req-a40c8542-c13b-4745-a8b4-c615a22df918 service nova] [instance: ae4ec050-5062-4e57-8090-f67f8c8013ff] Updating instance_info_cache with network_info: [{"id": "39451f1f-3666-45ef-8833-8b223f0961e5", "address": "fa:16:3e:98:66:ae", "network": {"id": "378f2580-0f19-413d-962d-b0d599d36125", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-493771480-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7865bca76c2e4ca5858347fed1ae38fc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d2e4070-a78e-4d08-a104-b6312ab65577", "external-id": "nsx-vlan-transportzone-292", "segmentation_id": 292, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39451f1f-36", "ovs_interfaceid": "39451f1f-3666-45ef-8833-8b223f0961e5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 947.462615] env[62627]: DEBUG oslo_vmware.api [None req-435d5302-8f40-4cb0-b035-30d528076bbc tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194607, 'name': PowerOnVM_Task, 'duration_secs': 0.424994} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.462933] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-435d5302-8f40-4cb0-b035-30d528076bbc tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 947.463138] env[62627]: DEBUG nova.compute.manager [None req-435d5302-8f40-4cb0-b035-30d528076bbc tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 947.463900] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b57d81be-32fb-4434-935a-b3e7db1c5212 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.534870] env[62627]: DEBUG oslo_vmware.api [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52100607-d607-6320-4a96-8d25756ec00c, 'name': SearchDatastore_Task, 'duration_secs': 0.012445} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.535208] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 947.535436] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] [instance: ae4ec050-5062-4e57-8090-f67f8c8013ff] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 947.535718] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 947.535900] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 947.536152] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Creating directory with path [datastore1] 
devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 947.536990] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-422b8f72-cbc1-4a36-aaf6-4c8fd32e1a8c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.546057] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 947.546247] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 947.546978] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-74dc06ec-e676-4dbe-a841-ce818cb5f72a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.553310] env[62627]: DEBUG oslo_vmware.api [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Waiting for the task: (returnval){ [ 947.553310] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5244ca36-bc5d-0d81-a4bc-16440552f536" [ 947.553310] env[62627]: _type = "Task" [ 947.553310] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.562213] env[62627]: DEBUG oslo_vmware.api [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5244ca36-bc5d-0d81-a4bc-16440552f536, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.579603] env[62627]: DEBUG oslo_vmware.api [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Task: {'id': task-2194608, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075151} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.579854] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: 5d288606-02b6-4d11-b285-7325e48fb5b3] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 947.580621] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-526b11cd-a1df-414b-a0dd-541464fcbaf8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.603897] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: 5d288606-02b6-4d11-b285-7325e48fb5b3] Reconfiguring VM instance instance-00000039 to attach disk [datastore1] 5d288606-02b6-4d11-b285-7325e48fb5b3/5d288606-02b6-4d11-b285-7325e48fb5b3.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 947.606762] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e160aa88-db9c-41a5-8d94-d37efb711403 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.625334] env[62627]: DEBUG oslo_vmware.api [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Task: {'id': task-2194609, 'name': Rename_Task, 'duration_secs': 0.164174} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.628679] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] [instance: c560b907-500b-42fd-851b-61a04472b660] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 947.628988] env[62627]: DEBUG oslo_vmware.api [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Waiting for the task: (returnval){ [ 947.628988] env[62627]: value = "task-2194610" [ 947.628988] env[62627]: _type = "Task" [ 947.628988] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.629182] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0223293c-9320-4366-838e-56f2a53575f9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.639912] env[62627]: DEBUG oslo_vmware.api [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Task: {'id': task-2194610, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.641057] env[62627]: DEBUG oslo_vmware.api [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Waiting for the task: (returnval){ [ 947.641057] env[62627]: value = "task-2194611" [ 947.641057] env[62627]: _type = "Task" [ 947.641057] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.647879] env[62627]: DEBUG oslo_vmware.api [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Task: {'id': task-2194611, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.754635] env[62627]: DEBUG oslo_concurrency.lockutils [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquiring lock "refresh_cache-d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 947.754865] env[62627]: DEBUG oslo_concurrency.lockutils [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquired lock "refresh_cache-d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 947.754989] env[62627]: DEBUG nova.network.neutron [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 947.784416] env[62627]: DEBUG oslo_concurrency.lockutils [req-92c0eb01-d173-411b-80dd-eb853a02d3b5 req-a40c8542-c13b-4745-a8b4-c615a22df918 service nova] Releasing lock "refresh_cache-ae4ec050-5062-4e57-8090-f67f8c8013ff" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 948.068029] env[62627]: DEBUG oslo_vmware.api [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5244ca36-bc5d-0d81-a4bc-16440552f536, 'name': SearchDatastore_Task, 'duration_secs': 0.017265} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.069295] env[62627]: DEBUG nova.network.neutron [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Port 4a5f7c0b-f029-481c-a96f-0d9789d6bad5 binding to destination host cpu-1 is already ACTIVE {{(pid=62627) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 948.069295] env[62627]: DEBUG oslo_concurrency.lockutils [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Acquiring lock "refresh_cache-892fc84b-02a9-4fa0-81b5-80326a060c64" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 948.069456] env[62627]: DEBUG oslo_concurrency.lockutils [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Acquired lock "refresh_cache-892fc84b-02a9-4fa0-81b5-80326a060c64" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 948.069628] env[62627]: DEBUG nova.network.neutron [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 948.071383] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-63cea09f-482a-495a-9bbe-078ccf146fca {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.079399] env[62627]: DEBUG oslo_vmware.api [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Waiting for the task: (returnval){ [ 948.079399] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]522026d3-8d9a-b28d-625a-58b5a874a087" [ 948.079399] env[62627]: _type = "Task" [ 948.079399] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.088513] env[62627]: DEBUG oslo_vmware.api [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]522026d3-8d9a-b28d-625a-58b5a874a087, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.126914] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68a56605-f0e9-4363-9345-ffcce659c056 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.137886] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-848fca04-0120-49a7-aa8e-307425d34a78 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.148871] env[62627]: DEBUG oslo_vmware.api [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Task: {'id': task-2194610, 'name': ReconfigVM_Task, 'duration_secs': 0.309765} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.174556] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: 5d288606-02b6-4d11-b285-7325e48fb5b3] Reconfigured VM instance instance-00000039 to attach disk [datastore1] 5d288606-02b6-4d11-b285-7325e48fb5b3/5d288606-02b6-4d11-b285-7325e48fb5b3.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 948.178066] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dea80077-aa98-4c12-b4f5-6a7ab4798227 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.180220] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88dd396c-8d32-42c0-9b76-977af7bffb1b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.182740] env[62627]: DEBUG oslo_vmware.api [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Task: {'id': task-2194611, 'name': PowerOnVM_Task, 'duration_secs': 0.487969} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.183569] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] [instance: c560b907-500b-42fd-851b-61a04472b660] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 948.183784] env[62627]: INFO nova.compute.manager [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] [instance: c560b907-500b-42fd-851b-61a04472b660] Took 8.53 seconds to spawn the instance on the hypervisor. 
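The wait_for_task / _poll_task entries around the PowerOnVM_Task above reflect oslo.vmware's task-polling pattern: the driver starts a vSphere task, then repeatedly polls it, logging "progress is N%" until it reports success or error. A minimal sketch of that pattern follows; the function and state names are illustrative only and do not reproduce the actual oslo_vmware.api.VMwareAPISession implementation.

    # Illustrative sketch of the polling loop behind the "Waiting for the task"
    # and "progress is N%" log lines. Names here are hypothetical, not oslo.vmware API.
    import time


    class TaskFailed(Exception):
        """Raised when the polled task ends in an error state."""


    def wait_for_task(get_task_info, poll_interval=0.5):
        """Poll a vSphere-style task until it reaches a terminal state.

        get_task_info is any callable returning a dict such as
        {'state': 'running', 'progress': 33} or {'state': 'success'}.
        """
        while True:
            info = get_task_info()
            state = info.get('state')
            if state == 'success':
                return info
            if state == 'error':
                raise TaskFailed(info.get('error', 'task failed'))
            # Corresponds to the "progress is N%" lines emitted while the task runs.
            print("progress is %s%%" % info.get('progress', 0))
            time.sleep(poll_interval)


    if __name__ == '__main__':
        # Fake task that completes after three polls (e.g. a PowerOnVM_Task).
        states = iter([{'state': 'running', 'progress': 0},
                       {'state': 'running', 'progress': 33},
                       {'state': 'success', 'progress': 100}])
        wait_for_task(lambda: next(states), poll_interval=0)

In the log, the same shape appears for every task type (PowerOnVM_Task, Rename_Task, CopyVirtualDisk_Task, CreateVM_Task): a "Waiting for the task" entry with the task value, zero or more progress polls, then a "completed successfully" entry carrying duration_secs.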
[ 948.187025] env[62627]: DEBUG nova.compute.manager [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] [instance: c560b907-500b-42fd-851b-61a04472b660] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 948.187025] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a2039d4-d721-43d9-a193-1178d4714b02 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.190799] env[62627]: DEBUG oslo_vmware.api [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Waiting for the task: (returnval){ [ 948.190799] env[62627]: value = "task-2194612" [ 948.190799] env[62627]: _type = "Task" [ 948.190799] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.192195] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-585da331-a971-41ae-9371-7e84b542ded7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.212307] env[62627]: DEBUG oslo_vmware.api [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Task: {'id': task-2194612, 'name': Rename_Task} progress is 6%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.222626] env[62627]: DEBUG nova.compute.provider_tree [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 948.313844] env[62627]: DEBUG nova.network.neutron [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Instance cache missing network info. 
{{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 948.555644] env[62627]: DEBUG nova.network.neutron [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Updating instance_info_cache with network_info: [{"id": "332cfd7e-9934-429a-bc74-275d48f07fdb", "address": "fa:16:3e:68:4c:72", "network": {"id": "e5d8177b-3afb-46cb-88d9-d4777ffa4a39", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1184692155-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f3a36c557145e38c528a628b1f3b67", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a44713-0af1-486e-bc0d-00e03a769fa4", "external-id": "nsx-vlan-transportzone-420", "segmentation_id": 420, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap332cfd7e-99", "ovs_interfaceid": "332cfd7e-9934-429a-bc74-275d48f07fdb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 948.592567] env[62627]: DEBUG oslo_vmware.api [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]522026d3-8d9a-b28d-625a-58b5a874a087, 'name': SearchDatastore_Task, 'duration_secs': 0.018841} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.592730] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 948.593120] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore1] ae4ec050-5062-4e57-8090-f67f8c8013ff/ae4ec050-5062-4e57-8090-f67f8c8013ff.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 948.593756] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3d3a1216-8f1f-4e42-a908-c6d63c304b3d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.602040] env[62627]: DEBUG oslo_vmware.api [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Waiting for the task: (returnval){ [ 948.602040] env[62627]: value = "task-2194613" [ 948.602040] env[62627]: _type = "Task" [ 948.602040] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.614789] env[62627]: DEBUG oslo_vmware.api [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Task: {'id': task-2194613, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.618340] env[62627]: DEBUG oslo_concurrency.lockutils [None req-acda6638-c9cf-460e-9dc1-b5ba7c2eb229 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Acquiring lock "38ab16cc-be4f-47b4-a818-ea864e853872" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 948.618794] env[62627]: DEBUG oslo_concurrency.lockutils [None req-acda6638-c9cf-460e-9dc1-b5ba7c2eb229 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Lock "38ab16cc-be4f-47b4-a818-ea864e853872" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 948.619248] env[62627]: DEBUG oslo_concurrency.lockutils [None req-acda6638-c9cf-460e-9dc1-b5ba7c2eb229 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Acquiring lock "38ab16cc-be4f-47b4-a818-ea864e853872-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 948.620618] env[62627]: DEBUG oslo_concurrency.lockutils [None req-acda6638-c9cf-460e-9dc1-b5ba7c2eb229 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Lock "38ab16cc-be4f-47b4-a818-ea864e853872-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 948.620618] env[62627]: DEBUG oslo_concurrency.lockutils [None req-acda6638-c9cf-460e-9dc1-b5ba7c2eb229 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Lock "38ab16cc-be4f-47b4-a818-ea864e853872-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 948.622320] env[62627]: INFO nova.compute.manager [None req-acda6638-c9cf-460e-9dc1-b5ba7c2eb229 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Terminating instance [ 948.718133] env[62627]: DEBUG oslo_vmware.api [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Task: {'id': task-2194612, 'name': Rename_Task, 'duration_secs': 0.148646} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.718133] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: 5d288606-02b6-4d11-b285-7325e48fb5b3] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 948.718133] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0608a9d8-4fa3-4f3a-9dc0-c69fa7b6d03b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.721137] env[62627]: INFO nova.compute.manager [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] [instance: c560b907-500b-42fd-851b-61a04472b660] Took 43.11 seconds to build instance. [ 948.726807] env[62627]: DEBUG nova.scheduler.client.report [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 948.735214] env[62627]: DEBUG oslo_vmware.api [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Waiting for the task: (returnval){ [ 948.735214] env[62627]: value = "task-2194614" [ 948.735214] env[62627]: _type = "Task" [ 948.735214] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.749130] env[62627]: DEBUG oslo_vmware.api [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Task: {'id': task-2194614, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.923185] env[62627]: DEBUG nova.network.neutron [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Updating instance_info_cache with network_info: [{"id": "4a5f7c0b-f029-481c-a96f-0d9789d6bad5", "address": "fa:16:3e:6b:ef:0c", "network": {"id": "bd876f46-2bdf-4196-b718-c36f7e15f150", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.227", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ccb0cafa1a904d578ec4998b386bebf7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae1d6a8-cbba-4bbf-af10-ba5467340475", "external-id": "nsx-vlan-transportzone-271", "segmentation_id": 271, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4a5f7c0b-f0", "ovs_interfaceid": "4a5f7c0b-f029-481c-a96f-0d9789d6bad5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 949.061017] env[62627]: DEBUG oslo_concurrency.lockutils [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Releasing lock "refresh_cache-d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 949.061017] env[62627]: DEBUG nova.compute.manager [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Instance network_info: |[{"id": "332cfd7e-9934-429a-bc74-275d48f07fdb", "address": "fa:16:3e:68:4c:72", "network": {"id": "e5d8177b-3afb-46cb-88d9-d4777ffa4a39", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1184692155-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f3a36c557145e38c528a628b1f3b67", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a44713-0af1-486e-bc0d-00e03a769fa4", "external-id": "nsx-vlan-transportzone-420", "segmentation_id": 420, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap332cfd7e-99", "ovs_interfaceid": "332cfd7e-9934-429a-bc74-275d48f07fdb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 
949.061017] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:68:4c:72', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e7a44713-0af1-486e-bc0d-00e03a769fa4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '332cfd7e-9934-429a-bc74-275d48f07fdb', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 949.068264] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Creating folder: Project (a9f3a36c557145e38c528a628b1f3b67). Parent ref: group-v447541. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 949.069854] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a49d2840-f55e-48c9-b1ba-527831a08aa3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.081492] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Created folder: Project (a9f3a36c557145e38c528a628b1f3b67) in parent group-v447541. [ 949.081907] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Creating folder: Instances. Parent ref: group-v447705. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 949.082261] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-78c89854-d5b5-4659-abc6-10dac65b950c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.092050] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Created folder: Instances in parent group-v447705. [ 949.092384] env[62627]: DEBUG oslo.service.loopingcall [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 949.092620] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 949.092870] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-57170d13-3763-404c-964e-a85b0535b07f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.118026] env[62627]: DEBUG oslo_vmware.api [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Task: {'id': task-2194613, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.119480] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 949.119480] env[62627]: value = "task-2194617" [ 949.119480] env[62627]: _type = "Task" [ 949.119480] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.128966] env[62627]: DEBUG nova.compute.manager [None req-acda6638-c9cf-460e-9dc1-b5ba7c2eb229 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Start destroying the instance on the hypervisor. {{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 949.129338] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-acda6638-c9cf-460e-9dc1-b5ba7c2eb229 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 949.129763] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194617, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.130422] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43afc800-e704-40a0-8c39-2a3682909555 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.140089] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-acda6638-c9cf-460e-9dc1-b5ba7c2eb229 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 949.141057] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-17310da4-f29f-4f01-b582-4d16540cccf7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.149525] env[62627]: DEBUG oslo_vmware.api [None req-acda6638-c9cf-460e-9dc1-b5ba7c2eb229 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Waiting for the task: (returnval){ [ 949.149525] env[62627]: value = "task-2194618" [ 949.149525] env[62627]: _type = "Task" [ 949.149525] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.158398] env[62627]: DEBUG oslo_vmware.api [None req-acda6638-c9cf-460e-9dc1-b5ba7c2eb229 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194618, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.226895] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cefb9977-e139-4cf0-85fc-4e49647a62eb tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Lock "c560b907-500b-42fd-851b-61a04472b660" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 55.166s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 949.240094] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.591s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 949.240094] env[62627]: DEBUG nova.compute.manager [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] [instance: 8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 949.244281] env[62627]: DEBUG oslo_concurrency.lockutils [None req-984eae37-fe79-4d36-9fa0-d8596dd2f782 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.265s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 949.244281] env[62627]: DEBUG nova.objects.instance [None req-984eae37-fe79-4d36-9fa0-d8596dd2f782 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Lazy-loading 'resources' on Instance uuid c9088ac9-456b-4c92-8773-79c263e7cc2a {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 949.257371] env[62627]: DEBUG oslo_vmware.api [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Task: {'id': task-2194614, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.285606] env[62627]: DEBUG nova.compute.manager [req-be323825-0427-44ca-a231-0ad600943e83 req-7a4c03ae-dc8a-4af6-9df2-aeec55332a42 service nova] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Received event network-changed-332cfd7e-9934-429a-bc74-275d48f07fdb {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 949.285606] env[62627]: DEBUG nova.compute.manager [req-be323825-0427-44ca-a231-0ad600943e83 req-7a4c03ae-dc8a-4af6-9df2-aeec55332a42 service nova] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Refreshing instance network info cache due to event network-changed-332cfd7e-9934-429a-bc74-275d48f07fdb. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 949.286340] env[62627]: DEBUG oslo_concurrency.lockutils [req-be323825-0427-44ca-a231-0ad600943e83 req-7a4c03ae-dc8a-4af6-9df2-aeec55332a42 service nova] Acquiring lock "refresh_cache-d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 949.286945] env[62627]: DEBUG oslo_concurrency.lockutils [req-be323825-0427-44ca-a231-0ad600943e83 req-7a4c03ae-dc8a-4af6-9df2-aeec55332a42 service nova] Acquired lock "refresh_cache-d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 949.287279] env[62627]: DEBUG nova.network.neutron [req-be323825-0427-44ca-a231-0ad600943e83 req-7a4c03ae-dc8a-4af6-9df2-aeec55332a42 service nova] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Refreshing network info cache for port 332cfd7e-9934-429a-bc74-275d48f07fdb {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 949.429847] env[62627]: DEBUG oslo_concurrency.lockutils [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Releasing lock "refresh_cache-892fc84b-02a9-4fa0-81b5-80326a060c64" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 949.620609] env[62627]: DEBUG oslo_vmware.api [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Task: {'id': task-2194613, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.749296} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.624098] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore1] ae4ec050-5062-4e57-8090-f67f8c8013ff/ae4ec050-5062-4e57-8090-f67f8c8013ff.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 949.624325] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] [instance: ae4ec050-5062-4e57-8090-f67f8c8013ff] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 949.624592] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8829b470-2563-416a-892e-e49db59e58b0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.631250] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194617, 'name': CreateVM_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.633074] env[62627]: DEBUG oslo_vmware.api [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Waiting for the task: (returnval){ [ 949.633074] env[62627]: value = "task-2194619" [ 949.633074] env[62627]: _type = "Task" [ 949.633074] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.640770] env[62627]: DEBUG oslo_vmware.api [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Task: {'id': task-2194619, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.657910] env[62627]: DEBUG oslo_vmware.api [None req-acda6638-c9cf-460e-9dc1-b5ba7c2eb229 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194618, 'name': PowerOffVM_Task, 'duration_secs': 0.306023} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.658191] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-acda6638-c9cf-460e-9dc1-b5ba7c2eb229 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 949.658352] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-acda6638-c9cf-460e-9dc1-b5ba7c2eb229 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 949.658620] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c8d9b465-0c0a-4d90-9629-b1ced92e87d0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.734737] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-acda6638-c9cf-460e-9dc1-b5ba7c2eb229 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 949.735030] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-acda6638-c9cf-460e-9dc1-b5ba7c2eb229 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 949.735242] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-acda6638-c9cf-460e-9dc1-b5ba7c2eb229 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Deleting the datastore file [datastore2] 38ab16cc-be4f-47b4-a818-ea864e853872 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 949.735535] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d895cdc3-55a7-44b0-86a6-73288cf9c4f6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.742038] env[62627]: DEBUG oslo_vmware.api [None req-acda6638-c9cf-460e-9dc1-b5ba7c2eb229 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Waiting for the task: (returnval){ [ 949.742038] env[62627]: value = "task-2194621" [ 949.742038] env[62627]: _type = "Task" [ 949.742038] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.754009] env[62627]: DEBUG nova.compute.utils [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 949.756471] env[62627]: DEBUG oslo_vmware.api [None req-acda6638-c9cf-460e-9dc1-b5ba7c2eb229 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194621, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.760104] env[62627]: DEBUG nova.compute.manager [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] [instance: 8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a] Not allocating networking since 'none' was specified. {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 949.760334] env[62627]: DEBUG oslo_vmware.api [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Task: {'id': task-2194614, 'name': PowerOnVM_Task, 'duration_secs': 0.623883} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.760548] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: 5d288606-02b6-4d11-b285-7325e48fb5b3] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 949.760740] env[62627]: INFO nova.compute.manager [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: 5d288606-02b6-4d11-b285-7325e48fb5b3] Took 8.47 seconds to spawn the instance on the hypervisor. [ 949.760910] env[62627]: DEBUG nova.compute.manager [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: 5d288606-02b6-4d11-b285-7325e48fb5b3] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 949.761664] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a40fd81c-c97e-4400-8076-50a5d7e47ee8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.863140] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d8337a53-c167-4f0e-bf22-c90bfb4511f0 tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Acquiring lock "c560b907-500b-42fd-851b-61a04472b660" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 949.863389] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d8337a53-c167-4f0e-bf22-c90bfb4511f0 tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Lock "c560b907-500b-42fd-851b-61a04472b660" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 949.863593] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d8337a53-c167-4f0e-bf22-c90bfb4511f0 tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Acquiring lock "c560b907-500b-42fd-851b-61a04472b660-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 949.863779] env[62627]: 
DEBUG oslo_concurrency.lockutils [None req-d8337a53-c167-4f0e-bf22-c90bfb4511f0 tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Lock "c560b907-500b-42fd-851b-61a04472b660-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 949.863943] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d8337a53-c167-4f0e-bf22-c90bfb4511f0 tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Lock "c560b907-500b-42fd-851b-61a04472b660-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 949.866092] env[62627]: INFO nova.compute.manager [None req-d8337a53-c167-4f0e-bf22-c90bfb4511f0 tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] [instance: c560b907-500b-42fd-851b-61a04472b660] Terminating instance [ 949.932068] env[62627]: DEBUG nova.compute.manager [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=62627) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 949.932358] env[62627]: DEBUG oslo_concurrency.lockutils [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 950.125686] env[62627]: DEBUG nova.network.neutron [req-be323825-0427-44ca-a231-0ad600943e83 req-7a4c03ae-dc8a-4af6-9df2-aeec55332a42 service nova] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Updated VIF entry in instance network info cache for port 332cfd7e-9934-429a-bc74-275d48f07fdb. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 950.125686] env[62627]: DEBUG nova.network.neutron [req-be323825-0427-44ca-a231-0ad600943e83 req-7a4c03ae-dc8a-4af6-9df2-aeec55332a42 service nova] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Updating instance_info_cache with network_info: [{"id": "332cfd7e-9934-429a-bc74-275d48f07fdb", "address": "fa:16:3e:68:4c:72", "network": {"id": "e5d8177b-3afb-46cb-88d9-d4777ffa4a39", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1184692155-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f3a36c557145e38c528a628b1f3b67", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a44713-0af1-486e-bc0d-00e03a769fa4", "external-id": "nsx-vlan-transportzone-420", "segmentation_id": 420, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap332cfd7e-99", "ovs_interfaceid": "332cfd7e-9934-429a-bc74-275d48f07fdb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 950.134503] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194617, 'name': CreateVM_Task, 'duration_secs': 0.515033} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.138186] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 950.141560] env[62627]: DEBUG oslo_concurrency.lockutils [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 950.141659] env[62627]: DEBUG oslo_concurrency.lockutils [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 950.141973] env[62627]: DEBUG oslo_concurrency.lockutils [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 950.143681] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-760b1672-2fe9-4001-9d98-2989347c5e0a {{(pid=62627) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.149039] env[62627]: DEBUG oslo_vmware.api [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Task: {'id': task-2194619, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063451} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.149447] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] [instance: ae4ec050-5062-4e57-8090-f67f8c8013ff] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 950.150272] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b6f2b48-58c1-40e8-aaca-485f937042c6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.154765] env[62627]: DEBUG oslo_vmware.api [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 950.154765] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5254fa02-d1d4-a87b-141d-974cbe26d3a9" [ 950.154765] env[62627]: _type = "Task" [ 950.154765] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.181078] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] [instance: ae4ec050-5062-4e57-8090-f67f8c8013ff] Reconfiguring VM instance instance-0000003a to attach disk [datastore1] ae4ec050-5062-4e57-8090-f67f8c8013ff/ae4ec050-5062-4e57-8090-f67f8c8013ff.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 950.184614] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7b7f4675-bc32-46e6-bffc-fa608245756c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.202476] env[62627]: DEBUG oslo_vmware.api [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5254fa02-d1d4-a87b-141d-974cbe26d3a9, 'name': SearchDatastore_Task, 'duration_secs': 0.031806} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.203172] env[62627]: DEBUG oslo_concurrency.lockutils [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 950.203421] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 950.203655] env[62627]: DEBUG oslo_concurrency.lockutils [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 950.203821] env[62627]: DEBUG oslo_concurrency.lockutils [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 950.204039] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 950.204729] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ab7e3ddf-0af0-4a58-a637-bc013c57b827 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.208525] env[62627]: DEBUG oslo_vmware.api [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Waiting for the task: (returnval){ [ 950.208525] env[62627]: value = "task-2194622" [ 950.208525] env[62627]: _type = "Task" [ 950.208525] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.218651] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 950.218833] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 950.219555] env[62627]: DEBUG oslo_vmware.api [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Task: {'id': task-2194622, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.219766] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6754fdca-5684-4585-84ec-ecb4bd4b0b50 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.227022] env[62627]: DEBUG oslo_vmware.api [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 950.227022] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5249fc25-95f7-21c1-926e-8f61993266fe" [ 950.227022] env[62627]: _type = "Task" [ 950.227022] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.233635] env[62627]: DEBUG oslo_vmware.api [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5249fc25-95f7-21c1-926e-8f61993266fe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.251658] env[62627]: DEBUG oslo_vmware.api [None req-acda6638-c9cf-460e-9dc1-b5ba7c2eb229 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194621, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.253313] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85a5ce0f-5021-44ab-a88c-5f7108e44b35 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.260034] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cf91eef-89f4-442b-8c83-42022e3463fa {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.263547] env[62627]: DEBUG nova.compute.manager [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] [instance: 8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a] Start building block device mappings for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 950.302200] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-838ba02a-08b1-468c-927a-11fbb8c65d73 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.305228] env[62627]: INFO nova.compute.manager [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: 5d288606-02b6-4d11-b285-7325e48fb5b3] Took 44.69 seconds to build instance. [ 950.312452] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21643eb1-cfaa-42db-bceb-9f41166e49d1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.327276] env[62627]: DEBUG nova.compute.provider_tree [None req-984eae37-fe79-4d36-9fa0-d8596dd2f782 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 950.370136] env[62627]: DEBUG nova.compute.manager [None req-d8337a53-c167-4f0e-bf22-c90bfb4511f0 tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] [instance: c560b907-500b-42fd-851b-61a04472b660] Start destroying the instance on the hypervisor. {{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 950.370369] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d8337a53-c167-4f0e-bf22-c90bfb4511f0 tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] [instance: c560b907-500b-42fd-851b-61a04472b660] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 950.371982] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-837a2f48-a606-4ce4-92a5-f94966694922 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.379032] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8337a53-c167-4f0e-bf22-c90bfb4511f0 tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] [instance: c560b907-500b-42fd-851b-61a04472b660] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 950.379270] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9ef51d6f-6411-48e3-b26b-5c2e01293b30 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.384849] env[62627]: DEBUG oslo_vmware.api [None req-d8337a53-c167-4f0e-bf22-c90bfb4511f0 tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Waiting for the task: (returnval){ [ 950.384849] env[62627]: value = "task-2194623" [ 950.384849] env[62627]: _type = "Task" [ 950.384849] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.392874] env[62627]: DEBUG oslo_vmware.api [None req-d8337a53-c167-4f0e-bf22-c90bfb4511f0 tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Task: {'id': task-2194623, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.630121] env[62627]: DEBUG oslo_concurrency.lockutils [req-be323825-0427-44ca-a231-0ad600943e83 req-7a4c03ae-dc8a-4af6-9df2-aeec55332a42 service nova] Releasing lock "refresh_cache-d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 950.718784] env[62627]: DEBUG oslo_vmware.api [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Task: {'id': task-2194622, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.733189] env[62627]: DEBUG oslo_vmware.api [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5249fc25-95f7-21c1-926e-8f61993266fe, 'name': SearchDatastore_Task, 'duration_secs': 0.018136} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.733956] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-12ffa16f-0afb-4890-92f9-6450f0c21781 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.738823] env[62627]: DEBUG oslo_vmware.api [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 950.738823] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52638ccd-4ea6-f3f6-3941-d87a8a047239" [ 950.738823] env[62627]: _type = "Task" [ 950.738823] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.746335] env[62627]: DEBUG oslo_vmware.api [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52638ccd-4ea6-f3f6-3941-d87a8a047239, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.753628] env[62627]: DEBUG oslo_vmware.api [None req-acda6638-c9cf-460e-9dc1-b5ba7c2eb229 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194621, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.558476} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.753864] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-acda6638-c9cf-460e-9dc1-b5ba7c2eb229 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 950.754254] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-acda6638-c9cf-460e-9dc1-b5ba7c2eb229 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 950.754469] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-acda6638-c9cf-460e-9dc1-b5ba7c2eb229 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 950.754644] env[62627]: INFO nova.compute.manager [None req-acda6638-c9cf-460e-9dc1-b5ba7c2eb229 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Took 1.63 seconds to destroy the instance on the hypervisor. [ 950.754907] env[62627]: DEBUG oslo.service.loopingcall [None req-acda6638-c9cf-460e-9dc1-b5ba7c2eb229 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 950.755117] env[62627]: DEBUG nova.compute.manager [-] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 950.755216] env[62627]: DEBUG nova.network.neutron [-] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 950.809869] env[62627]: DEBUG oslo_concurrency.lockutils [None req-355ab114-cc8f-4f20-829e-7f29011ee8ef tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Lock "5d288606-02b6-4d11-b285-7325e48fb5b3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 54.646s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 950.830855] env[62627]: DEBUG nova.scheduler.client.report [None req-984eae37-fe79-4d36-9fa0-d8596dd2f782 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider
/opt/stack/nova/nova/scheduler/client/report.py:958}} [ 950.870836] env[62627]: DEBUG oslo_concurrency.lockutils [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Acquiring lock "493e7ea0-b160-4cd0-ae0b-bc46e4d89217" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 950.871406] env[62627]: DEBUG oslo_concurrency.lockutils [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Lock "493e7ea0-b160-4cd0-ae0b-bc46e4d89217" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 950.906758] env[62627]: DEBUG oslo_vmware.api [None req-d8337a53-c167-4f0e-bf22-c90bfb4511f0 tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Task: {'id': task-2194623, 'name': PowerOffVM_Task, 'duration_secs': 0.30941} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.906758] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8337a53-c167-4f0e-bf22-c90bfb4511f0 tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] [instance: c560b907-500b-42fd-851b-61a04472b660] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 950.906758] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d8337a53-c167-4f0e-bf22-c90bfb4511f0 tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] [instance: c560b907-500b-42fd-851b-61a04472b660] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 950.906758] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-849db768-9922-48c0-bc7b-65ae82fe58a8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.969071] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d8337a53-c167-4f0e-bf22-c90bfb4511f0 tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] [instance: c560b907-500b-42fd-851b-61a04472b660] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 950.969071] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d8337a53-c167-4f0e-bf22-c90bfb4511f0 tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] [instance: c560b907-500b-42fd-851b-61a04472b660] Deleting contents of the VM from datastore datastore1 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 950.969220] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-d8337a53-c167-4f0e-bf22-c90bfb4511f0 tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Deleting the datastore file [datastore1] c560b907-500b-42fd-851b-61a04472b660 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 950.969588] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with
opID=oslo.vmware-837b1d10-a14b-43a0-9436-fbaf692cd540 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.977898] env[62627]: DEBUG oslo_vmware.api [None req-d8337a53-c167-4f0e-bf22-c90bfb4511f0 tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Waiting for the task: (returnval){ [ 950.977898] env[62627]: value = "task-2194625" [ 950.977898] env[62627]: _type = "Task" [ 950.977898] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.987059] env[62627]: DEBUG oslo_vmware.api [None req-d8337a53-c167-4f0e-bf22-c90bfb4511f0 tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Task: {'id': task-2194625, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.107974] env[62627]: DEBUG nova.compute.manager [req-5f66a4f9-c523-41dd-a7c5-e08997fc2e78 req-88e2b241-5a7a-4c4c-9d20-2a8ec2204867 service nova] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Received event network-vif-deleted-e7de9fde-1309-4a6a-bbb5-537333d9e42c {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 951.108389] env[62627]: INFO nova.compute.manager [req-5f66a4f9-c523-41dd-a7c5-e08997fc2e78 req-88e2b241-5a7a-4c4c-9d20-2a8ec2204867 service nova] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Neutron deleted interface e7de9fde-1309-4a6a-bbb5-537333d9e42c; detaching it from the instance and deleting it from the info cache [ 951.108606] env[62627]: DEBUG nova.network.neutron [req-5f66a4f9-c523-41dd-a7c5-e08997fc2e78 req-88e2b241-5a7a-4c4c-9d20-2a8ec2204867 service nova] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 951.219293] env[62627]: DEBUG oslo_vmware.api [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Task: {'id': task-2194622, 'name': ReconfigVM_Task, 'duration_secs': 0.776444} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.219609] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] [instance: ae4ec050-5062-4e57-8090-f67f8c8013ff] Reconfigured VM instance instance-0000003a to attach disk [datastore1] ae4ec050-5062-4e57-8090-f67f8c8013ff/ae4ec050-5062-4e57-8090-f67f8c8013ff.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 951.220651] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0e05007c-25d2-484e-920b-67f4c79bf89e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.228482] env[62627]: DEBUG oslo_vmware.api [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Waiting for the task: (returnval){ [ 951.228482] env[62627]: value = "task-2194626" [ 951.228482] env[62627]: _type = "Task" [ 951.228482] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.242141] env[62627]: DEBUG oslo_vmware.api [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Task: {'id': task-2194626, 'name': Rename_Task} progress is 6%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.250427] env[62627]: DEBUG oslo_vmware.api [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52638ccd-4ea6-f3f6-3941-d87a8a047239, 'name': SearchDatastore_Task, 'duration_secs': 0.028967} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.250681] env[62627]: DEBUG oslo_concurrency.lockutils [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 951.250935] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93/d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 951.251212] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f3e4d147-7679-46ce-a933-123025b1e53e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.257334] env[62627]: DEBUG oslo_vmware.api [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 951.257334] env[62627]: value = "task-2194627" [ 951.257334] env[62627]: _type = "Task" [ 951.257334] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.265475] env[62627]: DEBUG oslo_vmware.api [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2194627, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.272934] env[62627]: DEBUG nova.compute.manager [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] [instance: 8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a] Start spawning the instance on the hypervisor. 
{{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 951.300957] env[62627]: DEBUG nova.virt.hardware [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=<?>,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-01-30T20:28:30Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 951.301350] env[62627]: DEBUG nova.virt.hardware [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 951.301552] env[62627]: DEBUG nova.virt.hardware [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 951.301795] env[62627]: DEBUG nova.virt.hardware [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 951.301988] env[62627]: DEBUG nova.virt.hardware [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 951.302189] env[62627]: DEBUG nova.virt.hardware [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 951.302461] env[62627]: DEBUG nova.virt.hardware [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 951.302697] env[62627]: DEBUG nova.virt.hardware [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 951.302904] env[62627]: DEBUG nova.virt.hardware [None
req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 951.303143] env[62627]: DEBUG nova.virt.hardware [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 951.303370] env[62627]: DEBUG nova.virt.hardware [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 951.304365] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2a3b6e9-4cb6-4154-9806-6597b10bb973 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.314753] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4cc94a7-7b2b-4626-9163-7b23c97ac64f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.328560] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] [instance: 8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a] Instance VIF info [] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 951.333968] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Creating folder: Project (27c5ac159a7d4d0f8f8058511d42feb6). Parent ref: group-v447541. 
{{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 951.334313] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-89aa7b7f-215e-4c4c-80f3-be98b78ef275 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.336708] env[62627]: DEBUG oslo_concurrency.lockutils [None req-984eae37-fe79-4d36-9fa0-d8596dd2f782 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.094s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 951.338840] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.307s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 951.340399] env[62627]: INFO nova.compute.claims [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 951.352701] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Created folder: Project (27c5ac159a7d4d0f8f8058511d42feb6) in parent group-v447541. [ 951.352701] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Creating folder: Instances. Parent ref: group-v447708. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 951.352701] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fc48d132-e537-4f33-9d57-47180755a6ac {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.359181] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Created folder: Instances in parent group-v447708. [ 951.359181] env[62627]: DEBUG oslo.service.loopingcall [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 951.359311] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 951.359418] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-48d1ab06-94a3-469b-840f-66d313dda464 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.373927] env[62627]: INFO nova.scheduler.client.report [None req-984eae37-fe79-4d36-9fa0-d8596dd2f782 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Deleted allocations for instance c9088ac9-456b-4c92-8773-79c263e7cc2a [ 951.375430] env[62627]: DEBUG nova.compute.manager [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 493e7ea0-b160-4cd0-ae0b-bc46e4d89217] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 951.385555] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 951.385555] env[62627]: value = "task-2194630" [ 951.385555] env[62627]: _type = "Task" [ 951.385555] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.394807] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194630, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.488852] env[62627]: DEBUG oslo_vmware.api [None req-d8337a53-c167-4f0e-bf22-c90bfb4511f0 tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Task: {'id': task-2194625, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.3502} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.489127] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-d8337a53-c167-4f0e-bf22-c90bfb4511f0 tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 951.489345] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d8337a53-c167-4f0e-bf22-c90bfb4511f0 tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] [instance: c560b907-500b-42fd-851b-61a04472b660] Deleted contents of the VM from datastore datastore1 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 951.489527] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d8337a53-c167-4f0e-bf22-c90bfb4511f0 tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] [instance: c560b907-500b-42fd-851b-61a04472b660] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 951.489728] env[62627]: INFO nova.compute.manager [None req-d8337a53-c167-4f0e-bf22-c90bfb4511f0 tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] [instance: c560b907-500b-42fd-851b-61a04472b660] Took 1.12 seconds to destroy the instance on the hypervisor. 
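The entries above trace the vmwareapi destroy path for instance c560b907-500b-42fd-851b-61a04472b660: PowerOffVM_Task, UnregisterVM, and FileManager.DeleteDatastoreFile_Task are invoked against vCenter, and each returned task is polled to completion before the network is deallocated. The following is an illustrative sketch only, not part of the log: it shows the same invoke-then-wait pattern using oslo.vmware's public session API, with the connection values, vm_ref, ds_path, and dc_ref assumed to come from configuration and earlier lookups (the real driver reads host_ip/host_username/host_password from nova.conf and resolves the managed object references itself).

    from oslo_vmware import api

    # Assumed/illustrative connection values.
    session = api.VMwareAPISession('vcenter.example.org', 'administrator',
                                   'secret', api_retry_count=10,
                                   task_poll_interval=0.5)

    def destroy_vm(session, vm_ref, ds_path, dc_ref):
        # Power off the VM and block until the vCenter task completes,
        # mirroring the PowerOffVM_Task / _poll_task lines above.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        session.wait_for_task(task)

        # Remove the VM from the vCenter inventory; UnregisterVM returns
        # no task, so there is nothing to poll.
        session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

        # Delete the instance directory from the datastore, e.g.
        # "[datastore1] c560b907-500b-42fd-851b-61a04472b660".
        file_manager = session.vim.service_content.fileManager
        task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                                  file_manager, name=ds_path,
                                  datacenter=dc_ref)
        session.wait_for_task(task)

The Neutron deallocation that follows in the log (deallocate_for_instance()) happens on the Nova side and is independent of these vCenter calls.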
[ 951.490013] env[62627]: DEBUG oslo.service.loopingcall [None req-d8337a53-c167-4f0e-bf22-c90bfb4511f0 tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 951.490230] env[62627]: DEBUG nova.compute.manager [-] [instance: c560b907-500b-42fd-851b-61a04472b660] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 951.490325] env[62627]: DEBUG nova.network.neutron [-] [instance: c560b907-500b-42fd-851b-61a04472b660] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 951.590781] env[62627]: DEBUG nova.network.neutron [-] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 951.612650] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-01bbba86-f280-4c47-9850-1051850dd98e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.624995] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e012536-f431-461f-9134-7b424595b8f7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.675597] env[62627]: DEBUG nova.compute.manager [req-5f66a4f9-c523-41dd-a7c5-e08997fc2e78 req-88e2b241-5a7a-4c4c-9d20-2a8ec2204867 service nova] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Detach interface failed, port_id=e7de9fde-1309-4a6a-bbb5-537333d9e42c, reason: Instance 38ab16cc-be4f-47b4-a818-ea864e853872 could not be found. {{(pid=62627) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 951.740901] env[62627]: DEBUG oslo_vmware.api [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Task: {'id': task-2194626, 'name': Rename_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.773355] env[62627]: DEBUG oslo_vmware.api [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2194627, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.899530] env[62627]: DEBUG oslo_concurrency.lockutils [None req-984eae37-fe79-4d36-9fa0-d8596dd2f782 tempest-ServerAddressesNegativeTestJSON-609171485 tempest-ServerAddressesNegativeTestJSON-609171485-project-member] Lock "c9088ac9-456b-4c92-8773-79c263e7cc2a" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 41.529s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 951.910666] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194630, 'name': CreateVM_Task, 'duration_secs': 0.438106} completed successfully.
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.911902] env[62627]: DEBUG oslo_concurrency.lockutils [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 951.912150] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 951.912599] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 951.912807] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 951.913196] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 951.913471] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dcaf132b-be72-42dc-b917-aa2e933ba2de {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.924547] env[62627]: DEBUG oslo_vmware.api [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Waiting for the task: (returnval){ [ 951.924547] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]521d9d4e-09b8-813c-3542-097c27e728b7" [ 951.924547] env[62627]: _type = "Task" [ 951.924547] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.936487] env[62627]: DEBUG oslo_vmware.api [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]521d9d4e-09b8-813c-3542-097c27e728b7, 'name': SearchDatastore_Task, 'duration_secs': 0.011059} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.936487] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 951.936775] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] [instance: 8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 951.936965] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 951.937178] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 951.937348] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 951.937640] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-14f7fd91-4c0f-4519-9119-4f3948365847 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.946406] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 951.946581] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 951.947434] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1bb290e5-aa97-47dc-87bc-7b7b09c655f3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.955201] env[62627]: DEBUG oslo_vmware.api [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Waiting for the task: (returnval){ [ 951.955201] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]523435d0-a507-39ed-bde6-293f9ad96e0f" [ 951.955201] env[62627]: _type = "Task" [ 951.955201] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.964126] env[62627]: DEBUG oslo_vmware.api [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]523435d0-a507-39ed-bde6-293f9ad96e0f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.093758] env[62627]: INFO nova.compute.manager [-] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Took 1.34 seconds to deallocate network for instance. [ 952.239687] env[62627]: DEBUG oslo_vmware.api [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Task: {'id': task-2194626, 'name': Rename_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.272064] env[62627]: DEBUG oslo_vmware.api [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2194627, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.532665} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.273049] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93/d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 952.273049] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 952.273049] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c889eaed-8366-4460-acdb-159048667260 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.278576] env[62627]: DEBUG nova.network.neutron [-] [instance: c560b907-500b-42fd-851b-61a04472b660] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 952.280643] env[62627]: DEBUG oslo_vmware.api [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 952.280643] env[62627]: value = "task-2194631" [ 952.280643] env[62627]: _type = "Task" [ 952.280643] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.291048] env[62627]: DEBUG oslo_vmware.api [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2194631, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.467577] env[62627]: DEBUG oslo_vmware.api [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]523435d0-a507-39ed-bde6-293f9ad96e0f, 'name': SearchDatastore_Task, 'duration_secs': 0.009829} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.468401] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1e037803-7aaa-44b0-b53d-535ea130e277 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.475728] env[62627]: DEBUG oslo_vmware.api [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Waiting for the task: (returnval){ [ 952.475728] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5280cc4e-f129-7763-20cc-8921e401cd91" [ 952.475728] env[62627]: _type = "Task" [ 952.475728] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.484907] env[62627]: DEBUG oslo_vmware.api [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5280cc4e-f129-7763-20cc-8921e401cd91, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.605127] env[62627]: DEBUG oslo_concurrency.lockutils [None req-acda6638-c9cf-460e-9dc1-b5ba7c2eb229 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 952.741833] env[62627]: DEBUG oslo_vmware.api [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Task: {'id': task-2194626, 'name': Rename_Task, 'duration_secs': 1.148156} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.742188] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] [instance: ae4ec050-5062-4e57-8090-f67f8c8013ff] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 952.742521] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-09655fc7-f2d9-4af1-9adc-c514adae8b63 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.748931] env[62627]: DEBUG oslo_vmware.api [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Waiting for the task: (returnval){ [ 952.748931] env[62627]: value = "task-2194632" [ 952.748931] env[62627]: _type = "Task" [ 952.748931] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.756873] env[62627]: DEBUG oslo_vmware.api [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Task: {'id': task-2194632, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.781862] env[62627]: INFO nova.compute.manager [-] [instance: c560b907-500b-42fd-851b-61a04472b660] Took 1.29 seconds to deallocate network for instance. [ 952.797178] env[62627]: DEBUG oslo_vmware.api [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2194631, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067119} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.797883] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 952.799064] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72118046-b024-48d5-9e32-fd150b33d0ca {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.823216] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Reconfiguring VM instance instance-0000003b to attach disk [datastore2] d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93/d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 952.826545] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-808e4d64-a31d-478b-874f-07e96967876d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.848049] env[62627]: DEBUG oslo_vmware.api [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 952.848049] env[62627]: value = "task-2194633" [ 952.848049] env[62627]: _type = "Task" [ 952.848049] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.861828] env[62627]: DEBUG oslo_vmware.api [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2194633, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.892770] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1310c8f6-1151-4110-a161-b177a7432b80 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.900809] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5df49f50-903d-4498-a36f-23490ed51985 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.943152] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eadddf76-e6d9-46f3-b6be-d72a1e6917b0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.951828] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9aef23a-7595-4b42-8e3e-05f198a7ea5b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.965769] env[62627]: DEBUG nova.compute.provider_tree [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 952.986115] env[62627]: DEBUG oslo_vmware.api [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5280cc4e-f129-7763-20cc-8921e401cd91, 'name': SearchDatastore_Task, 'duration_secs': 0.032698} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.986115] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 952.986372] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a/8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 952.986616] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e3df7a6b-d3c9-41a3-82d7-40c6459e6438 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.992854] env[62627]: DEBUG oslo_vmware.api [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Waiting for the task: (returnval){ [ 952.992854] env[62627]: value = "task-2194634" [ 952.992854] env[62627]: _type = "Task" [ 952.992854] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.001618] env[62627]: DEBUG oslo_vmware.api [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Task: {'id': task-2194634, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.139413] env[62627]: DEBUG nova.compute.manager [req-1d798e00-5e09-4888-b67d-d8d97dc5007a req-856e4db1-4da3-4d3f-ba3b-e23eaf0266f8 service nova] [instance: c560b907-500b-42fd-851b-61a04472b660] Received event network-vif-deleted-1edb8f37-0f69-4fa7-9649-7a07dad18ee3 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 953.261093] env[62627]: DEBUG oslo_vmware.api [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Task: {'id': task-2194632, 'name': PowerOnVM_Task} progress is 99%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.291914] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d8337a53-c167-4f0e-bf22-c90bfb4511f0 tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 953.366630] env[62627]: DEBUG oslo_vmware.api [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2194633, 'name': ReconfigVM_Task, 'duration_secs': 0.343572} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.367838] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Reconfigured VM instance instance-0000003b to attach disk [datastore2] d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93/d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 953.368653] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d0600bca-9049-4d62-9267-16acb774faa3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.378248] env[62627]: DEBUG oslo_vmware.api [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 953.378248] env[62627]: value = "task-2194635" [ 953.378248] env[62627]: _type = "Task" [ 953.378248] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.393408] env[62627]: DEBUG oslo_vmware.api [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2194635, 'name': Rename_Task} progress is 5%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.473017] env[62627]: DEBUG nova.scheduler.client.report [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 953.507559] env[62627]: DEBUG oslo_vmware.api [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Task: {'id': task-2194634, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.489806} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.507559] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a/8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 953.507559] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] [instance: 8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 953.507559] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f63f6c3a-c630-454e-a5cb-1405745a55cb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.513622] env[62627]: DEBUG oslo_vmware.api [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Waiting for the task: (returnval){ [ 953.513622] env[62627]: value = "task-2194636" [ 953.513622] env[62627]: _type = "Task" [ 953.513622] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.524527] env[62627]: DEBUG oslo_vmware.api [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Task: {'id': task-2194636, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.761383] env[62627]: DEBUG oslo_vmware.api [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Task: {'id': task-2194632, 'name': PowerOnVM_Task, 'duration_secs': 0.592275} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.761383] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] [instance: ae4ec050-5062-4e57-8090-f67f8c8013ff] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 953.761383] env[62627]: INFO nova.compute.manager [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] [instance: ae4ec050-5062-4e57-8090-f67f8c8013ff] Took 9.76 seconds to spawn the instance on the hypervisor. [ 953.761836] env[62627]: DEBUG nova.compute.manager [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] [instance: ae4ec050-5062-4e57-8090-f67f8c8013ff] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 953.762410] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88867e17-ab0a-4df7-9e24-6db26ec58cef {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.888835] env[62627]: DEBUG oslo_vmware.api [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2194635, 'name': Rename_Task, 'duration_secs': 0.196288} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.889133] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 953.889409] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6779e27b-25af-47f8-866f-7b212bdcc890 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.896294] env[62627]: DEBUG oslo_vmware.api [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 953.896294] env[62627]: value = "task-2194637" [ 953.896294] env[62627]: _type = "Task" [ 953.896294] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.905433] env[62627]: DEBUG oslo_vmware.api [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2194637, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.962316] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a02867fa-0d21-41cc-97cb-36aa98a7d386 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Acquiring lock "5d288606-02b6-4d11-b285-7325e48fb5b3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 953.962585] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a02867fa-0d21-41cc-97cb-36aa98a7d386 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Lock "5d288606-02b6-4d11-b285-7325e48fb5b3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 953.962894] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a02867fa-0d21-41cc-97cb-36aa98a7d386 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Acquiring lock "5d288606-02b6-4d11-b285-7325e48fb5b3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 953.963122] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a02867fa-0d21-41cc-97cb-36aa98a7d386 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Lock "5d288606-02b6-4d11-b285-7325e48fb5b3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 953.963349] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a02867fa-0d21-41cc-97cb-36aa98a7d386 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Lock "5d288606-02b6-4d11-b285-7325e48fb5b3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 953.965593] env[62627]: INFO nova.compute.manager [None req-a02867fa-0d21-41cc-97cb-36aa98a7d386 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: 5d288606-02b6-4d11-b285-7325e48fb5b3] Terminating instance [ 953.976644] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.638s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 953.977143] env[62627]: DEBUG nova.compute.manager 
[None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 953.979689] env[62627]: DEBUG oslo_concurrency.lockutils [None req-57d41992-3480-45de-bd64-78a5467b8811 tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.551s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 953.979908] env[62627]: DEBUG nova.objects.instance [None req-57d41992-3480-45de-bd64-78a5467b8811 tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Lazy-loading 'resources' on Instance uuid 77c60059-535a-47ca-a5b0-45cc1df93b45 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 954.024530] env[62627]: DEBUG oslo_vmware.api [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Task: {'id': task-2194636, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.093339} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.025264] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] [instance: 8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 954.025678] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-833e9db6-cfeb-4571-b448-92a483bfa0a5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.045224] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] [instance: 8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a] Reconfiguring VM instance instance-0000003c to attach disk [datastore2] 8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a/8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 954.045493] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bacfaf92-eb0f-4259-be51-9705baa37ee9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.065341] env[62627]: DEBUG oslo_vmware.api [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Waiting for the task: (returnval){ [ 954.065341] env[62627]: value = "task-2194638" [ 954.065341] env[62627]: _type = "Task" [ 954.065341] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.072748] env[62627]: DEBUG oslo_vmware.api [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Task: {'id': task-2194638, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.281465] env[62627]: INFO nova.compute.manager [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] [instance: ae4ec050-5062-4e57-8090-f67f8c8013ff] Took 48.14 seconds to build instance. [ 954.407273] env[62627]: DEBUG oslo_vmware.api [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2194637, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.470901] env[62627]: DEBUG nova.compute.manager [None req-a02867fa-0d21-41cc-97cb-36aa98a7d386 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: 5d288606-02b6-4d11-b285-7325e48fb5b3] Start destroying the instance on the hypervisor. {{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 954.471073] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-a02867fa-0d21-41cc-97cb-36aa98a7d386 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: 5d288606-02b6-4d11-b285-7325e48fb5b3] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 954.472272] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18f32959-fd15-42a4-9fe6-64d1a4bd6d84 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.479151] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-a02867fa-0d21-41cc-97cb-36aa98a7d386 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: 5d288606-02b6-4d11-b285-7325e48fb5b3] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 954.479412] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f54097ae-29a8-4f16-b326-99909600c7e9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.483391] env[62627]: DEBUG nova.compute.utils [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 954.488401] env[62627]: DEBUG nova.compute.manager [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Allocating IP information in the background. 
{{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 954.488580] env[62627]: DEBUG nova.network.neutron [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 954.490726] env[62627]: DEBUG oslo_vmware.api [None req-a02867fa-0d21-41cc-97cb-36aa98a7d386 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Waiting for the task: (returnval){ [ 954.490726] env[62627]: value = "task-2194639" [ 954.490726] env[62627]: _type = "Task" [ 954.490726] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.501985] env[62627]: DEBUG oslo_vmware.api [None req-a02867fa-0d21-41cc-97cb-36aa98a7d386 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Task: {'id': task-2194639, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.538787] env[62627]: DEBUG nova.policy [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3c5e5eee141e496db2d2ea316c43e6be', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8e4f7ad3ebcf4c4d97bdbce58eec5a09', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 954.578350] env[62627]: DEBUG oslo_vmware.api [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Task: {'id': task-2194638, 'name': ReconfigVM_Task, 'duration_secs': 0.285976} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.578654] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] [instance: 8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a] Reconfigured VM instance instance-0000003c to attach disk [datastore2] 8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a/8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 954.579341] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6af9a552-0cac-4504-baf3-afe6603eab7e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.585804] env[62627]: DEBUG oslo_vmware.api [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Waiting for the task: (returnval){ [ 954.585804] env[62627]: value = "task-2194640" [ 954.585804] env[62627]: _type = "Task" [ 954.585804] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.596499] env[62627]: DEBUG oslo_vmware.api [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Task: {'id': task-2194640, 'name': Rename_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.786480] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2560e1f7-3420-4399-9633-1d0a91e9dd9d tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Lock "ae4ec050-5062-4e57-8090-f67f8c8013ff" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 51.341s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 954.912289] env[62627]: DEBUG oslo_vmware.api [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2194637, 'name': PowerOnVM_Task, 'duration_secs': 0.889094} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.912540] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 954.912753] env[62627]: INFO nova.compute.manager [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Took 8.30 seconds to spawn the instance on the hypervisor. [ 954.912955] env[62627]: DEBUG nova.compute.manager [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 954.913774] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d3294ff-34ba-4793-9c77-2bc15e4157f4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.978926] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31685b41-bd15-4630-9003-60132d00afb0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.986374] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c99badb0-1862-4936-859f-7f4cb5e84886 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.989972] env[62627]: DEBUG nova.compute.manager [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Start building block device mappings for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 955.000620] env[62627]: DEBUG nova.network.neutron [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Successfully created port: 4c3f3405-39a4-4c22-9c26-bdd0ab342c29 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 955.033512] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e61ce99f-c0d0-40c8-bee7-14ed4bce42ad {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.044405] env[62627]: DEBUG oslo_vmware.api [None req-a02867fa-0d21-41cc-97cb-36aa98a7d386 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Task: {'id': task-2194639, 'name': PowerOffVM_Task, 'duration_secs': 0.388938} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.045274] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-a02867fa-0d21-41cc-97cb-36aa98a7d386 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: 5d288606-02b6-4d11-b285-7325e48fb5b3] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 955.045452] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-a02867fa-0d21-41cc-97cb-36aa98a7d386 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: 5d288606-02b6-4d11-b285-7325e48fb5b3] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 955.046022] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a4bb8e60-821a-4113-8de2-a8825dc2ade5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.051916] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-062b8856-4743-405b-867a-4c2710dcc4ec {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.065791] env[62627]: DEBUG nova.compute.provider_tree [None req-57d41992-3480-45de-bd64-78a5467b8811 tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 955.094797] env[62627]: DEBUG oslo_vmware.api [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Task: {'id': task-2194640, 'name': Rename_Task, 'duration_secs': 0.136807} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.095092] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] [instance: 8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 955.095353] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7dbae3ef-4fa1-41c6-a455-f44505164c2c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.100699] env[62627]: DEBUG oslo_vmware.api [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Waiting for the task: (returnval){ [ 955.100699] env[62627]: value = "task-2194642" [ 955.100699] env[62627]: _type = "Task" [ 955.100699] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.108717] env[62627]: DEBUG oslo_vmware.api [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Task: {'id': task-2194642, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.110929] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-a02867fa-0d21-41cc-97cb-36aa98a7d386 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: 5d288606-02b6-4d11-b285-7325e48fb5b3] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 955.111145] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-a02867fa-0d21-41cc-97cb-36aa98a7d386 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: 5d288606-02b6-4d11-b285-7325e48fb5b3] Deleting contents of the VM from datastore datastore1 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 955.111324] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-a02867fa-0d21-41cc-97cb-36aa98a7d386 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Deleting the datastore file [datastore1] 5d288606-02b6-4d11-b285-7325e48fb5b3 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 955.111560] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-17928cdc-354d-4d4a-82fe-3efdf9db9627 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.117556] env[62627]: DEBUG oslo_vmware.api [None req-a02867fa-0d21-41cc-97cb-36aa98a7d386 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Waiting for the task: (returnval){ [ 955.117556] env[62627]: value = "task-2194643" [ 955.117556] env[62627]: _type = "Task" [ 955.117556] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.126417] env[62627]: DEBUG oslo_vmware.api [None req-a02867fa-0d21-41cc-97cb-36aa98a7d386 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Task: {'id': task-2194643, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.437918] env[62627]: INFO nova.compute.manager [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Took 45.55 seconds to build instance. [ 955.571606] env[62627]: DEBUG nova.scheduler.client.report [None req-57d41992-3480-45de-bd64-78a5467b8811 tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 955.616259] env[62627]: DEBUG oslo_vmware.api [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Task: {'id': task-2194642, 'name': PowerOnVM_Task, 'duration_secs': 0.470345} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.616259] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] [instance: 8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 955.616259] env[62627]: INFO nova.compute.manager [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] [instance: 8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a] Took 4.34 seconds to spawn the instance on the hypervisor. [ 955.616259] env[62627]: DEBUG nova.compute.manager [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] [instance: 8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 955.616259] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d92670e-b550-42d2-8377-3e6f56f90cb1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.635273] env[62627]: DEBUG oslo_vmware.api [None req-a02867fa-0d21-41cc-97cb-36aa98a7d386 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Task: {'id': task-2194643, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.312121} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.635748] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-a02867fa-0d21-41cc-97cb-36aa98a7d386 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 955.635930] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-a02867fa-0d21-41cc-97cb-36aa98a7d386 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: 5d288606-02b6-4d11-b285-7325e48fb5b3] Deleted contents of the VM from datastore datastore1 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 955.636127] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-a02867fa-0d21-41cc-97cb-36aa98a7d386 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: 5d288606-02b6-4d11-b285-7325e48fb5b3] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 955.636679] env[62627]: INFO nova.compute.manager [None req-a02867fa-0d21-41cc-97cb-36aa98a7d386 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: 5d288606-02b6-4d11-b285-7325e48fb5b3] Took 1.17 seconds to destroy the instance on the hypervisor. [ 955.636930] env[62627]: DEBUG oslo.service.loopingcall [None req-a02867fa-0d21-41cc-97cb-36aa98a7d386 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 955.638019] env[62627]: DEBUG nova.compute.manager [-] [instance: 5d288606-02b6-4d11-b285-7325e48fb5b3] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 955.638019] env[62627]: DEBUG nova.network.neutron [-] [instance: 5d288606-02b6-4d11-b285-7325e48fb5b3] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 955.657182] env[62627]: DEBUG nova.compute.manager [req-965d03dc-973b-4811-826f-221ba814788b req-c66315b9-a58d-4965-a2a9-c616568243f1 service nova] [instance: ae4ec050-5062-4e57-8090-f67f8c8013ff] Received event network-changed-39451f1f-3666-45ef-8833-8b223f0961e5 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 955.657365] env[62627]: DEBUG nova.compute.manager [req-965d03dc-973b-4811-826f-221ba814788b req-c66315b9-a58d-4965-a2a9-c616568243f1 service nova] [instance: ae4ec050-5062-4e57-8090-f67f8c8013ff] Refreshing instance network info cache due to event network-changed-39451f1f-3666-45ef-8833-8b223f0961e5. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 955.657571] env[62627]: DEBUG oslo_concurrency.lockutils [req-965d03dc-973b-4811-826f-221ba814788b req-c66315b9-a58d-4965-a2a9-c616568243f1 service nova] Acquiring lock "refresh_cache-ae4ec050-5062-4e57-8090-f67f8c8013ff" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 955.657784] env[62627]: DEBUG oslo_concurrency.lockutils [req-965d03dc-973b-4811-826f-221ba814788b req-c66315b9-a58d-4965-a2a9-c616568243f1 service nova] Acquired lock "refresh_cache-ae4ec050-5062-4e57-8090-f67f8c8013ff" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 955.657862] env[62627]: DEBUG nova.network.neutron [req-965d03dc-973b-4811-826f-221ba814788b req-c66315b9-a58d-4965-a2a9-c616568243f1 service nova] [instance: ae4ec050-5062-4e57-8090-f67f8c8013ff] Refreshing network info cache for port 39451f1f-3666-45ef-8833-8b223f0961e5 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 955.939940] env[62627]: DEBUG oslo_concurrency.lockutils [None req-25fab27d-0d4e-4e58-9afe-3b40ddedf903 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lock "d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 52.327s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 956.002045] env[62627]: DEBUG nova.compute.manager [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Start spawning the instance on the hypervisor. 
{{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 956.033495] env[62627]: DEBUG nova.virt.hardware [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 956.033782] env[62627]: DEBUG nova.virt.hardware [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 956.033973] env[62627]: DEBUG nova.virt.hardware [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 956.034271] env[62627]: DEBUG nova.virt.hardware [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 956.034418] env[62627]: DEBUG nova.virt.hardware [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 956.034629] env[62627]: DEBUG nova.virt.hardware [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 956.035034] env[62627]: DEBUG nova.virt.hardware [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 956.035288] env[62627]: DEBUG nova.virt.hardware [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 956.035526] env[62627]: DEBUG 
nova.virt.hardware [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 956.035780] env[62627]: DEBUG nova.virt.hardware [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 956.036155] env[62627]: DEBUG nova.virt.hardware [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 956.037238] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5910911-27fc-4b49-864f-17d9db7a3a7a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.047099] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a99e9d4-097d-4491-a1f6-a2948da55f01 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.077144] env[62627]: DEBUG oslo_concurrency.lockutils [None req-57d41992-3480-45de-bd64-78a5467b8811 tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.097s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 956.079541] env[62627]: DEBUG oslo_concurrency.lockutils [None req-322ff823-8468-4e75-a042-b370d3032725 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.546s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 956.079770] env[62627]: DEBUG nova.objects.instance [None req-322ff823-8468-4e75-a042-b370d3032725 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Lazy-loading 'resources' on Instance uuid aed62160-2db5-4008-a491-2b27ae81ef5f {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 956.104907] env[62627]: INFO nova.scheduler.client.report [None req-57d41992-3480-45de-bd64-78a5467b8811 tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Deleted allocations for instance 77c60059-535a-47ca-a5b0-45cc1df93b45 [ 956.143666] env[62627]: INFO nova.compute.manager [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] [instance: 8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a] Took 42.35 seconds to build instance. 
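The task IDs threaded through the records above (task-2194634 through task-2194640) trace the VMware spawn path for these instances: CopyVirtualDisk_Task from the devstack image cache, ExtendVirtualDisk_Task on the root disk, ReconfigVM_Task to attach it, Rename_Task, and finally PowerOnVM_Task, each driven by the same "Waiting for the task ... to complete" / "progress is N%" / "completed successfully" polling in oslo_vmware.api (the wait_for_task / _poll_task source references). The sketch below only illustrates that caller-side polling pattern, not the oslo.vmware implementation; get_task_info and TaskTimeout are hypothetical stand-ins for the PropertyCollector read and timeout error the library actually uses.

    import time

    class TaskTimeout(Exception):
        """Raised when the vCenter task does not finish within the timeout (illustrative)."""

    def wait_for_task(get_task_info, poll_interval=0.5, timeout=300.0):
        # get_task_info() stands in for the per-poll PropertyCollector read; it is
        # assumed to return an object with .state ('running', 'success', 'error'),
        # .progress (0-100) and .error.
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info()
            if info.state == "success":
                return info                      # "completed successfully"
            if info.state == "error":
                raise RuntimeError(info.error)   # the real library raises its own fault type here
            # corresponds to the periodic "progress is N%" DEBUG lines
            print("progress is %d%%" % info.progress)
            time.sleep(poll_interval)
        raise TaskTimeout("vCenter task did not complete within %.0fs" % timeout)

In the log this loop runs once per invoked task, which is why every CopyVirtualDisk/Reconfig/Rename/PowerOn invocation is immediately followed by one or more progress polls and then a "completed successfully" record carrying its duration_secs.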
[ 956.356294] env[62627]: DEBUG nova.network.neutron [-] [instance: 5d288606-02b6-4d11-b285-7325e48fb5b3] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 956.457526] env[62627]: DEBUG nova.compute.manager [req-3ab1b861-028b-4819-8901-45611ee0fb2d req-1927fbdf-2012-43a6-9002-984de52498b9 service nova] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Received event network-changed-332cfd7e-9934-429a-bc74-275d48f07fdb {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 956.457526] env[62627]: DEBUG nova.compute.manager [req-3ab1b861-028b-4819-8901-45611ee0fb2d req-1927fbdf-2012-43a6-9002-984de52498b9 service nova] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Refreshing instance network info cache due to event network-changed-332cfd7e-9934-429a-bc74-275d48f07fdb. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 956.458246] env[62627]: DEBUG oslo_concurrency.lockutils [req-3ab1b861-028b-4819-8901-45611ee0fb2d req-1927fbdf-2012-43a6-9002-984de52498b9 service nova] Acquiring lock "refresh_cache-d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 956.458576] env[62627]: DEBUG oslo_concurrency.lockutils [req-3ab1b861-028b-4819-8901-45611ee0fb2d req-1927fbdf-2012-43a6-9002-984de52498b9 service nova] Acquired lock "refresh_cache-d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 956.459240] env[62627]: DEBUG nova.network.neutron [req-3ab1b861-028b-4819-8901-45611ee0fb2d req-1927fbdf-2012-43a6-9002-984de52498b9 service nova] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Refreshing network info cache for port 332cfd7e-9934-429a-bc74-275d48f07fdb {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 956.487838] env[62627]: DEBUG nova.network.neutron [req-965d03dc-973b-4811-826f-221ba814788b req-c66315b9-a58d-4965-a2a9-c616568243f1 service nova] [instance: ae4ec050-5062-4e57-8090-f67f8c8013ff] Updated VIF entry in instance network info cache for port 39451f1f-3666-45ef-8833-8b223f0961e5. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 956.488269] env[62627]: DEBUG nova.network.neutron [req-965d03dc-973b-4811-826f-221ba814788b req-c66315b9-a58d-4965-a2a9-c616568243f1 service nova] [instance: ae4ec050-5062-4e57-8090-f67f8c8013ff] Updating instance_info_cache with network_info: [{"id": "39451f1f-3666-45ef-8833-8b223f0961e5", "address": "fa:16:3e:98:66:ae", "network": {"id": "378f2580-0f19-413d-962d-b0d599d36125", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-493771480-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.170", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7865bca76c2e4ca5858347fed1ae38fc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d2e4070-a78e-4d08-a104-b6312ab65577", "external-id": "nsx-vlan-transportzone-292", "segmentation_id": 292, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39451f1f-36", "ovs_interfaceid": "39451f1f-3666-45ef-8833-8b223f0961e5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 956.620806] env[62627]: DEBUG oslo_concurrency.lockutils [None req-57d41992-3480-45de-bd64-78a5467b8811 tempest-SecurityGroupsTestJSON-1783259876 tempest-SecurityGroupsTestJSON-1783259876-project-member] Lock "77c60059-535a-47ca-a5b0-45cc1df93b45" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.753s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 956.646161] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1ef0a41f-392f-46f2-84ae-49bbfd861fbb tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Lock "8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 43.861s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 956.674384] env[62627]: DEBUG nova.network.neutron [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Successfully updated port: 4c3f3405-39a4-4c22-9c26-bdd0ab342c29 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 956.859236] env[62627]: INFO nova.compute.manager [-] [instance: 5d288606-02b6-4d11-b285-7325e48fb5b3] Took 1.22 seconds to deallocate network for instance. 
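The recurring "Acquiring lock ... / Lock ... acquired ... waited Ns / ... released ... held Ns" records come from oslo.concurrency's lockutils: the resource tracker serializes on a shared "compute_resources" lock, while per-instance operations (build, terminate, event clearing) serialize on the instance UUID. A minimal sketch of those two usages, assuming illustrative function names rather than Nova's actual code:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage(instance):
        # Only one thread per worker mutates resource-tracker state at a time;
        # the waited/held durations in the log are emitted by lockutils itself.
        ...

    def terminate_instance(instance_uuid):
        # Per-instance serialization keys the lock on the UUID, which is why the
        # log shows lock names such as "5d288606-02b6-4d11-b285-7325e48fb5b3".
        with lockutils.lock(instance_uuid):
            ...

The long wait times visible here (roughly 33-35 s on "compute_resources") simply mean another request held the same named lock for that long before this one could proceed.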
[ 956.928880] env[62627]: DEBUG nova.compute.manager [None req-a32fd42b-e26e-486c-be7c-d90d0c0dd69b tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] [instance: 8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 956.929826] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b415fc75-d639-4bb5-8df8-7a66f87aeab9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.990819] env[62627]: DEBUG oslo_concurrency.lockutils [req-965d03dc-973b-4811-826f-221ba814788b req-c66315b9-a58d-4965-a2a9-c616568243f1 service nova] Releasing lock "refresh_cache-ae4ec050-5062-4e57-8090-f67f8c8013ff" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 957.084712] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6e363b6-bbfb-48a1-a971-124746813c42 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.094592] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a981fda5-6655-4f9c-b56f-a2ee0d0590b7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.132407] env[62627]: DEBUG oslo_concurrency.lockutils [None req-14d7e9fc-3bd6-4888-8aa6-805b80b62089 tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Acquiring lock "8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 957.132807] env[62627]: DEBUG oslo_concurrency.lockutils [None req-14d7e9fc-3bd6-4888-8aa6-805b80b62089 tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Lock "8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 957.133396] env[62627]: DEBUG oslo_concurrency.lockutils [None req-14d7e9fc-3bd6-4888-8aa6-805b80b62089 tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Acquiring lock "8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 957.133396] env[62627]: DEBUG oslo_concurrency.lockutils [None req-14d7e9fc-3bd6-4888-8aa6-805b80b62089 tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Lock "8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 957.133616] env[62627]: DEBUG oslo_concurrency.lockutils [None req-14d7e9fc-3bd6-4888-8aa6-805b80b62089 tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Lock 
"8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 957.139788] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84b1532e-5648-429c-90ae-eb28eb44edd0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.142991] env[62627]: INFO nova.compute.manager [None req-14d7e9fc-3bd6-4888-8aa6-805b80b62089 tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] [instance: 8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a] Terminating instance [ 957.151412] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb8ebcbb-b842-4d48-b4ed-36515be0d934 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.168344] env[62627]: DEBUG nova.compute.provider_tree [None req-322ff823-8468-4e75-a042-b370d3032725 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 957.178239] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquiring lock "refresh_cache-84b8381c-bdd4-462e-aa98-0cc972f00882" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 957.178323] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquired lock "refresh_cache-84b8381c-bdd4-462e-aa98-0cc972f00882" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 957.178530] env[62627]: DEBUG nova.network.neutron [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 957.295319] env[62627]: DEBUG nova.network.neutron [req-3ab1b861-028b-4819-8901-45611ee0fb2d req-1927fbdf-2012-43a6-9002-984de52498b9 service nova] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Updated VIF entry in instance network info cache for port 332cfd7e-9934-429a-bc74-275d48f07fdb. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 957.295702] env[62627]: DEBUG nova.network.neutron [req-3ab1b861-028b-4819-8901-45611ee0fb2d req-1927fbdf-2012-43a6-9002-984de52498b9 service nova] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Updating instance_info_cache with network_info: [{"id": "332cfd7e-9934-429a-bc74-275d48f07fdb", "address": "fa:16:3e:68:4c:72", "network": {"id": "e5d8177b-3afb-46cb-88d9-d4777ffa4a39", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1184692155-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.160", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f3a36c557145e38c528a628b1f3b67", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a44713-0af1-486e-bc0d-00e03a769fa4", "external-id": "nsx-vlan-transportzone-420", "segmentation_id": 420, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap332cfd7e-99", "ovs_interfaceid": "332cfd7e-9934-429a-bc74-275d48f07fdb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 957.368802] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a02867fa-0d21-41cc-97cb-36aa98a7d386 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 957.440751] env[62627]: INFO nova.compute.manager [None req-a32fd42b-e26e-486c-be7c-d90d0c0dd69b tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] [instance: 8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a] instance snapshotting [ 957.441390] env[62627]: DEBUG nova.objects.instance [None req-a32fd42b-e26e-486c-be7c-d90d0c0dd69b tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Lazy-loading 'flavor' on Instance uuid 8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 957.651699] env[62627]: DEBUG oslo_concurrency.lockutils [None req-14d7e9fc-3bd6-4888-8aa6-805b80b62089 tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Acquiring lock "refresh_cache-8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 957.651888] env[62627]: DEBUG oslo_concurrency.lockutils [None req-14d7e9fc-3bd6-4888-8aa6-805b80b62089 tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Acquired lock "refresh_cache-8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 957.652083] env[62627]: DEBUG nova.network.neutron [None 
req-14d7e9fc-3bd6-4888-8aa6-805b80b62089 tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] [instance: 8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 957.670422] env[62627]: DEBUG nova.scheduler.client.report [None req-322ff823-8468-4e75-a042-b370d3032725 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 958.416456] env[62627]: DEBUG nova.network.neutron [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 958.419561] env[62627]: DEBUG oslo_concurrency.lockutils [req-3ab1b861-028b-4819-8901-45611ee0fb2d req-1927fbdf-2012-43a6-9002-984de52498b9 service nova] Releasing lock "refresh_cache-d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 958.424060] env[62627]: DEBUG oslo_concurrency.lockutils [None req-322ff823-8468-4e75-a042-b370d3032725 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.345s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 958.426577] env[62627]: DEBUG nova.compute.manager [req-de60534f-f771-4f71-bc51-16149f8ae25f req-8223a0a4-87c9-46b2-a390-3f4073186558 service nova] [instance: 5d288606-02b6-4d11-b285-7325e48fb5b3] Received event network-vif-deleted-383a9402-570b-4677-b3fd-f0854a5bb40f {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 958.428041] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abba929a-3329-45d9-852e-4d99cb58fa8c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.432024] env[62627]: DEBUG oslo_concurrency.lockutils [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 35.041s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 958.432024] env[62627]: DEBUG nova.objects.instance [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Trying to apply a migration context that does not seem 
to be set for this instance {{(pid=62627) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 958.448832] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bce2b98d-0072-4b94-bc8e-9a57c98a5eac {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.453433] env[62627]: INFO nova.scheduler.client.report [None req-322ff823-8468-4e75-a042-b370d3032725 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Deleted allocations for instance aed62160-2db5-4008-a491-2b27ae81ef5f [ 958.468023] env[62627]: DEBUG nova.network.neutron [None req-14d7e9fc-3bd6-4888-8aa6-805b80b62089 tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] [instance: 8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 958.483644] env[62627]: DEBUG nova.compute.manager [req-ac9f6a0b-e877-4f32-b81d-284b47dce5d5 req-a76b78d4-29b5-431c-b8e1-8dcba8bba56e service nova] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Received event network-vif-plugged-4c3f3405-39a4-4c22-9c26-bdd0ab342c29 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 958.483734] env[62627]: DEBUG oslo_concurrency.lockutils [req-ac9f6a0b-e877-4f32-b81d-284b47dce5d5 req-a76b78d4-29b5-431c-b8e1-8dcba8bba56e service nova] Acquiring lock "84b8381c-bdd4-462e-aa98-0cc972f00882-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 958.483955] env[62627]: DEBUG oslo_concurrency.lockutils [req-ac9f6a0b-e877-4f32-b81d-284b47dce5d5 req-a76b78d4-29b5-431c-b8e1-8dcba8bba56e service nova] Lock "84b8381c-bdd4-462e-aa98-0cc972f00882-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 958.484111] env[62627]: DEBUG oslo_concurrency.lockutils [req-ac9f6a0b-e877-4f32-b81d-284b47dce5d5 req-a76b78d4-29b5-431c-b8e1-8dcba8bba56e service nova] Lock "84b8381c-bdd4-462e-aa98-0cc972f00882-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 958.484273] env[62627]: DEBUG nova.compute.manager [req-ac9f6a0b-e877-4f32-b81d-284b47dce5d5 req-a76b78d4-29b5-431c-b8e1-8dcba8bba56e service nova] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] No waiting events found dispatching network-vif-plugged-4c3f3405-39a4-4c22-9c26-bdd0ab342c29 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 958.484430] env[62627]: WARNING nova.compute.manager [req-ac9f6a0b-e877-4f32-b81d-284b47dce5d5 req-a76b78d4-29b5-431c-b8e1-8dcba8bba56e service nova] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Received unexpected event network-vif-plugged-4c3f3405-39a4-4c22-9c26-bdd0ab342c29 for instance with vm_state building and task_state spawning. 
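The "Received unexpected event network-vif-plugged-..." warning above is the normal outcome when Neutron reports a VIF event and no thread had registered a waiter for it yet (the instance was still building/spawning). A minimal stdlib-only sketch of that waiter-registry idea follows; the class and method names are illustrative and do not reproduce the real nova.compute.manager.InstanceEvents code.

import threading

class InstanceEventSketch:
    """Illustrative per-instance event registry (hypothetical; not the real
    nova.compute.manager.InstanceEvents class)."""

    def __init__(self):
        # Stand-in for the "<instance-uuid>-events" lock seen in the log entries above.
        self._lock = threading.Lock()
        self._waiters = {}  # {instance_uuid: {event_name: threading.Event}}

    def prepare(self, instance_uuid, event_name):
        # A spawning thread registers interest *before* Neutron can send the event.
        with self._lock:
            waiter = threading.Event()
            self._waiters.setdefault(instance_uuid, {})[event_name] = waiter
        return waiter

    def pop(self, instance_uuid, event_name):
        # Mirrors the pop_instance_event.._pop_event step: remove and return the waiter, if any.
        with self._lock:
            return self._waiters.get(instance_uuid, {}).pop(event_name, None)

    def dispatch(self, instance_uuid, event_name):
        waiter = self.pop(instance_uuid, event_name)
        if waiter is None:
            # "No waiting events found" -> the "Received unexpected event" warning above.
            print("unexpected event %s for instance %s" % (event_name, instance_uuid))
        else:
            waiter.set()  # wake whoever is blocked on waiter.wait()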
[ 958.484580] env[62627]: DEBUG nova.compute.manager [req-ac9f6a0b-e877-4f32-b81d-284b47dce5d5 req-a76b78d4-29b5-431c-b8e1-8dcba8bba56e service nova] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Received event network-changed-4c3f3405-39a4-4c22-9c26-bdd0ab342c29 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 958.484723] env[62627]: DEBUG nova.compute.manager [req-ac9f6a0b-e877-4f32-b81d-284b47dce5d5 req-a76b78d4-29b5-431c-b8e1-8dcba8bba56e service nova] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Refreshing instance network info cache due to event network-changed-4c3f3405-39a4-4c22-9c26-bdd0ab342c29. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 958.484910] env[62627]: DEBUG oslo_concurrency.lockutils [req-ac9f6a0b-e877-4f32-b81d-284b47dce5d5 req-a76b78d4-29b5-431c-b8e1-8dcba8bba56e service nova] Acquiring lock "refresh_cache-84b8381c-bdd4-462e-aa98-0cc972f00882" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 958.553603] env[62627]: DEBUG nova.network.neutron [None req-14d7e9fc-3bd6-4888-8aa6-805b80b62089 tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] [instance: 8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 958.581506] env[62627]: DEBUG nova.network.neutron [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Updating instance_info_cache with network_info: [{"id": "4c3f3405-39a4-4c22-9c26-bdd0ab342c29", "address": "fa:16:3e:05:76:ef", "network": {"id": "a1c5c227-8f1f-4881-a6d8-f446b64d9846", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1052922661-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e4f7ad3ebcf4c4d97bdbce58eec5a09", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "023d6500-887e-4dc4-bec5-06b40450d9c0", "external-id": "nsx-vlan-transportzone-108", "segmentation_id": 108, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c3f3405-39", "ovs_interfaceid": "4c3f3405-39a4-4c22-9c26-bdd0ab342c29", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 958.960529] env[62627]: DEBUG oslo_concurrency.lockutils [None req-322ff823-8468-4e75-a042-b370d3032725 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Lock "aed62160-2db5-4008-a491-2b27ae81ef5f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.109s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 958.968708] env[62627]: DEBUG 
nova.compute.manager [None req-a32fd42b-e26e-486c-be7c-d90d0c0dd69b tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] [instance: 8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a] Instance disappeared during snapshot {{(pid=62627) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4595}} [ 959.056888] env[62627]: DEBUG oslo_concurrency.lockutils [None req-14d7e9fc-3bd6-4888-8aa6-805b80b62089 tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Releasing lock "refresh_cache-8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 959.056964] env[62627]: DEBUG nova.compute.manager [None req-14d7e9fc-3bd6-4888-8aa6-805b80b62089 tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] [instance: 8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a] Start destroying the instance on the hypervisor. {{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 959.057140] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-14d7e9fc-3bd6-4888-8aa6-805b80b62089 tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] [instance: 8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 959.058035] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d7b5d9a-f4ff-4025-9e61-95d5c6527b9d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.067145] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-14d7e9fc-3bd6-4888-8aa6-805b80b62089 tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] [instance: 8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 959.067145] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3d233fa7-b123-43f0-aaa9-018297edda7c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.073443] env[62627]: DEBUG oslo_vmware.api [None req-14d7e9fc-3bd6-4888-8aa6-805b80b62089 tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Waiting for the task: (returnval){ [ 959.073443] env[62627]: value = "task-2194644" [ 959.073443] env[62627]: _type = "Task" [ 959.073443] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.082869] env[62627]: DEBUG oslo_vmware.api [None req-14d7e9fc-3bd6-4888-8aa6-805b80b62089 tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Task: {'id': task-2194644, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.085981] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Releasing lock "refresh_cache-84b8381c-bdd4-462e-aa98-0cc972f00882" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 959.086289] env[62627]: DEBUG nova.compute.manager [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Instance network_info: |[{"id": "4c3f3405-39a4-4c22-9c26-bdd0ab342c29", "address": "fa:16:3e:05:76:ef", "network": {"id": "a1c5c227-8f1f-4881-a6d8-f446b64d9846", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1052922661-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e4f7ad3ebcf4c4d97bdbce58eec5a09", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "023d6500-887e-4dc4-bec5-06b40450d9c0", "external-id": "nsx-vlan-transportzone-108", "segmentation_id": 108, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c3f3405-39", "ovs_interfaceid": "4c3f3405-39a4-4c22-9c26-bdd0ab342c29", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 959.086745] env[62627]: DEBUG oslo_concurrency.lockutils [req-ac9f6a0b-e877-4f32-b81d-284b47dce5d5 req-a76b78d4-29b5-431c-b8e1-8dcba8bba56e service nova] Acquired lock "refresh_cache-84b8381c-bdd4-462e-aa98-0cc972f00882" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 959.086989] env[62627]: DEBUG nova.network.neutron [req-ac9f6a0b-e877-4f32-b81d-284b47dce5d5 req-a76b78d4-29b5-431c-b8e1-8dcba8bba56e service nova] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Refreshing network info cache for port 4c3f3405-39a4-4c22-9c26-bdd0ab342c29 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 959.088051] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:05:76:ef', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '023d6500-887e-4dc4-bec5-06b40450d9c0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4c3f3405-39a4-4c22-9c26-bdd0ab342c29', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 959.102684] env[62627]: DEBUG oslo.service.loopingcall [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 
tempest-AttachInterfacesTestJSON-578257446-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 959.106222] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 959.106755] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-224b753e-f78a-49e9-a9c3-15096cb66c0d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.128799] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 959.128799] env[62627]: value = "task-2194645" [ 959.128799] env[62627]: _type = "Task" [ 959.128799] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.133837] env[62627]: DEBUG nova.compute.manager [None req-a32fd42b-e26e-486c-be7c-d90d0c0dd69b tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] [instance: 8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a] Found 0 images (rotation: 2) {{(pid=62627) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 959.143023] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194645, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.408356] env[62627]: DEBUG nova.network.neutron [req-ac9f6a0b-e877-4f32-b81d-284b47dce5d5 req-a76b78d4-29b5-431c-b8e1-8dcba8bba56e service nova] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Updated VIF entry in instance network info cache for port 4c3f3405-39a4-4c22-9c26-bdd0ab342c29. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 959.408722] env[62627]: DEBUG nova.network.neutron [req-ac9f6a0b-e877-4f32-b81d-284b47dce5d5 req-a76b78d4-29b5-431c-b8e1-8dcba8bba56e service nova] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Updating instance_info_cache with network_info: [{"id": "4c3f3405-39a4-4c22-9c26-bdd0ab342c29", "address": "fa:16:3e:05:76:ef", "network": {"id": "a1c5c227-8f1f-4881-a6d8-f446b64d9846", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1052922661-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e4f7ad3ebcf4c4d97bdbce58eec5a09", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "023d6500-887e-4dc4-bec5-06b40450d9c0", "external-id": "nsx-vlan-transportzone-108", "segmentation_id": 108, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c3f3405-39", "ovs_interfaceid": "4c3f3405-39a4-4c22-9c26-bdd0ab342c29", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 959.442762] env[62627]: DEBUG oslo_concurrency.lockutils [None req-240a3f6e-1c21-489f-82f8-674f9812c015 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.011s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 959.445147] env[62627]: DEBUG oslo_concurrency.lockutils [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 35.038s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 959.445147] env[62627]: DEBUG nova.objects.instance [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] [instance: 7804914c-7df8-45c2-a934-67aa73afb2e2] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62627) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 959.586696] env[62627]: DEBUG oslo_vmware.api [None req-14d7e9fc-3bd6-4888-8aa6-805b80b62089 tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Task: {'id': task-2194644, 'name': PowerOffVM_Task, 'duration_secs': 0.132684} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.587177] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-14d7e9fc-3bd6-4888-8aa6-805b80b62089 tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] [instance: 8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 959.587424] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-14d7e9fc-3bd6-4888-8aa6-805b80b62089 tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] [instance: 8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 959.587748] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-22387b88-31da-47b3-8ec3-0491d0d722cb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.618275] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-14d7e9fc-3bd6-4888-8aa6-805b80b62089 tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] [instance: 8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 959.618607] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-14d7e9fc-3bd6-4888-8aa6-805b80b62089 tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] [instance: 8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 959.618858] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-14d7e9fc-3bd6-4888-8aa6-805b80b62089 tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Deleting the datastore file [datastore2] 8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 959.619176] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b25b3505-5de8-4628-be97-1dfd13caf346 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.627573] env[62627]: DEBUG oslo_vmware.api [None req-14d7e9fc-3bd6-4888-8aa6-805b80b62089 tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Waiting for the task: (returnval){ [ 959.627573] env[62627]: value = "task-2194647" [ 959.627573] env[62627]: _type = "Task" [ 959.627573] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.640031] env[62627]: DEBUG oslo_vmware.api [None req-14d7e9fc-3bd6-4888-8aa6-805b80b62089 tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Task: {'id': task-2194647, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.643563] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194645, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.735859] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9f5bcb25-ae27-4738-a9e0-f350bba5a060 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Acquiring lock "cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 959.736128] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9f5bcb25-ae27-4738-a9e0-f350bba5a060 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Lock "cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 959.736325] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9f5bcb25-ae27-4738-a9e0-f350bba5a060 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Acquiring lock "cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 959.736505] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9f5bcb25-ae27-4738-a9e0-f350bba5a060 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Lock "cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 959.736997] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9f5bcb25-ae27-4738-a9e0-f350bba5a060 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Lock "cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 959.740172] env[62627]: INFO nova.compute.manager [None req-9f5bcb25-ae27-4738-a9e0-f350bba5a060 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Terminating instance [ 959.911585] env[62627]: DEBUG oslo_concurrency.lockutils [req-ac9f6a0b-e877-4f32-b81d-284b47dce5d5 req-a76b78d4-29b5-431c-b8e1-8dcba8bba56e service nova] Releasing lock "refresh_cache-84b8381c-bdd4-462e-aa98-0cc972f00882" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 960.143283] env[62627]: DEBUG oslo_vmware.api [None req-14d7e9fc-3bd6-4888-8aa6-805b80b62089 tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Task: {'id': task-2194647, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.29234} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.149747] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-14d7e9fc-3bd6-4888-8aa6-805b80b62089 tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 960.149747] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-14d7e9fc-3bd6-4888-8aa6-805b80b62089 tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] [instance: 8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 960.149747] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-14d7e9fc-3bd6-4888-8aa6-805b80b62089 tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] [instance: 8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 960.149747] env[62627]: INFO nova.compute.manager [None req-14d7e9fc-3bd6-4888-8aa6-805b80b62089 tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] [instance: 8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a] Took 1.09 seconds to destroy the instance on the hypervisor. [ 960.149747] env[62627]: DEBUG oslo.service.loopingcall [None req-14d7e9fc-3bd6-4888-8aa6-805b80b62089 tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 960.149974] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194645, 'name': CreateVM_Task, 'duration_secs': 0.599509} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.150182] env[62627]: DEBUG nova.compute.manager [-] [instance: 8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 960.150344] env[62627]: DEBUG nova.network.neutron [-] [instance: 8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 960.152639] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 960.153413] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 960.153936] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 960.154117] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 960.154849] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a30a1080-4712-4ae6-8aba-621b390acd69 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.161053] env[62627]: DEBUG oslo_vmware.api [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Waiting for the task: (returnval){ [ 960.161053] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52a5c275-f19e-d99e-cb84-d1e10cd6b2f3" [ 960.161053] env[62627]: _type = "Task" [ 960.161053] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.175137] env[62627]: DEBUG oslo_vmware.api [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52a5c275-f19e-d99e-cb84-d1e10cd6b2f3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.177637] env[62627]: DEBUG nova.network.neutron [-] [instance: 8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a] Instance cache missing network info. 
{{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 960.244123] env[62627]: DEBUG nova.compute.manager [None req-9f5bcb25-ae27-4738-a9e0-f350bba5a060 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Start destroying the instance on the hypervisor. {{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 960.247318] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-9f5bcb25-ae27-4738-a9e0-f350bba5a060 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 960.247318] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30b6c1d4-ba36-4cfd-bf78-cbff2a31a4c6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.255702] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f5bcb25-ae27-4738-a9e0-f350bba5a060 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 960.256076] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bcdb3df1-e95a-444e-9d20-209a1d86b48e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.263081] env[62627]: DEBUG oslo_vmware.api [None req-9f5bcb25-ae27-4738-a9e0-f350bba5a060 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Waiting for the task: (returnval){ [ 960.263081] env[62627]: value = "task-2194648" [ 960.263081] env[62627]: _type = "Task" [ 960.263081] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.274944] env[62627]: DEBUG oslo_vmware.api [None req-9f5bcb25-ae27-4738-a9e0-f350bba5a060 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194648, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.460020] env[62627]: DEBUG oslo_concurrency.lockutils [None req-562975ae-12b3-400e-ae85-877c5517e25a tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.014s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 960.460020] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1d35545e-d84c-41b3-acf8-65a4c5576943 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.948s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 960.460020] env[62627]: DEBUG nova.objects.instance [None req-1d35545e-d84c-41b3-acf8-65a4c5576943 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Lazy-loading 'resources' on Instance uuid e76bdb57-4a30-4222-ad4c-d95e4e6193cb {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 960.671651] env[62627]: DEBUG oslo_vmware.api [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52a5c275-f19e-d99e-cb84-d1e10cd6b2f3, 'name': SearchDatastore_Task, 'duration_secs': 0.013693} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.671651] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 960.671848] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 960.672087] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 960.672236] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 960.672411] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 
tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 960.672682] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ecbe5fd2-bd81-4d92-9e97-336db2cd909a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.679949] env[62627]: DEBUG nova.network.neutron [-] [instance: 8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 960.688263] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 960.688481] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 960.690154] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ddbdff22-c1d7-460a-a70a-660ea0a24eaa {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.696405] env[62627]: DEBUG oslo_vmware.api [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Waiting for the task: (returnval){ [ 960.696405] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52393194-1a83-042b-8ea0-9ccdcca42381" [ 960.696405] env[62627]: _type = "Task" [ 960.696405] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.704584] env[62627]: DEBUG oslo_vmware.api [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52393194-1a83-042b-8ea0-9ccdcca42381, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.775065] env[62627]: DEBUG oslo_vmware.api [None req-9f5bcb25-ae27-4738-a9e0-f350bba5a060 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194648, 'name': PowerOffVM_Task, 'duration_secs': 0.489748} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.775350] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f5bcb25-ae27-4738-a9e0-f350bba5a060 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 960.775518] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-9f5bcb25-ae27-4738-a9e0-f350bba5a060 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 960.776689] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f3ef49c3-f67e-42ef-bd58-5b0f20ae2b5f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.852569] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-9f5bcb25-ae27-4738-a9e0-f350bba5a060 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 960.852825] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-9f5bcb25-ae27-4738-a9e0-f350bba5a060 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 960.853072] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f5bcb25-ae27-4738-a9e0-f350bba5a060 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Deleting the datastore file [datastore2] cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 960.853352] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-345f8680-a8b2-4142-8ad7-0be1567a52e0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.863138] env[62627]: DEBUG oslo_vmware.api [None req-9f5bcb25-ae27-4738-a9e0-f350bba5a060 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Waiting for the task: (returnval){ [ 960.863138] env[62627]: value = "task-2194650" [ 960.863138] env[62627]: _type = "Task" [ 960.863138] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.871192] env[62627]: DEBUG oslo_vmware.api [None req-9f5bcb25-ae27-4738-a9e0-f350bba5a060 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194650, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.181946] env[62627]: INFO nova.compute.manager [-] [instance: 8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a] Took 1.03 seconds to deallocate network for instance. 
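The PowerOffVM_Task, CreateVM_Task, DeleteDatastoreFile_Task and SearchDatastore_Task entries above all follow the same shape: submit a vCenter task, poll it ("progress is 0%", "progress is 25%"), and return once it reports "completed successfully". The stdlib-only loop below sketches that polling pattern; poll_progress and the dict it returns are assumptions for illustration, not the actual oslo_vmware.api implementation.

import time

def wait_for_task_sketch(poll_progress, interval=0.5, timeout=300):
    # poll_progress is assumed to return a dict like
    # {'state': 'running' | 'success' | 'error', 'progress': <int percent>}.
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = poll_progress()
        if info['state'] == 'success':
            return info
        if info['state'] == 'error':
            raise RuntimeError('task failed: %r' % (info,))
        # Corresponds to the "Task: {...} progress is 25%." style entries above.
        print('progress is %s%%' % info.get('progress', 0))
        time.sleep(interval)
    raise TimeoutError('task did not complete within %s seconds' % timeout)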
[ 961.210599] env[62627]: DEBUG oslo_vmware.api [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52393194-1a83-042b-8ea0-9ccdcca42381, 'name': SearchDatastore_Task, 'duration_secs': 0.015321} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.212349] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e8c6de3-cce4-4101-8954-daf75d65d26e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.216650] env[62627]: DEBUG oslo_vmware.api [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Waiting for the task: (returnval){ [ 961.216650] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52909274-a245-a8f4-5b39-68099122385c" [ 961.216650] env[62627]: _type = "Task" [ 961.216650] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.227780] env[62627]: DEBUG oslo_vmware.api [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52909274-a245-a8f4-5b39-68099122385c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.374149] env[62627]: DEBUG oslo_vmware.api [None req-9f5bcb25-ae27-4738-a9e0-f350bba5a060 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Task: {'id': task-2194650, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.315407} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.374241] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f5bcb25-ae27-4738-a9e0-f350bba5a060 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 961.374443] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-9f5bcb25-ae27-4738-a9e0-f350bba5a060 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 961.374610] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-9f5bcb25-ae27-4738-a9e0-f350bba5a060 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 961.374783] env[62627]: INFO nova.compute.manager [None req-9f5bcb25-ae27-4738-a9e0-f350bba5a060 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Took 1.13 seconds to destroy the instance on the hypervisor. 
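Both instance teardowns in this log hand network deallocation to an oslo.service looping call ("Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return"). The retry policy itself is not visible in the log, so the sketch below only shows the general FixedIntervalLoopingCall pattern; the helper name, its arguments and the attempt limit are assumptions, not Nova's exact code.

from oslo_service import loopingcall

def deallocate_network_with_retries_sketch(deallocate, instance, max_attempts=3):
    # `deallocate`, `instance` and `max_attempts` are placeholders for illustration.
    attempts = {'count': 0}

    def _try_once():
        attempts['count'] += 1
        try:
            deallocate(instance)
        except Exception:
            if attempts['count'] >= max_attempts:
                raise  # stops the looping call; .wait() re-raises the error
            return     # otherwise let the next fixed-interval iteration retry
        raise loopingcall.LoopingCallDone()  # success: stop looping cleanly

    timer = loopingcall.FixedIntervalLoopingCall(_try_once)
    timer.start(interval=1).wait()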
[ 961.375058] env[62627]: DEBUG oslo.service.loopingcall [None req-9f5bcb25-ae27-4738-a9e0-f350bba5a060 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 961.375248] env[62627]: DEBUG nova.compute.manager [-] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 961.375343] env[62627]: DEBUG nova.network.neutron [-] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 961.514855] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26a2eb73-437a-42bf-b841-ad09345c6808 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.526264] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75f000a4-0202-41d5-a68d-ec593ec0a23c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.559086] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60e2c0be-8fff-409d-9a8f-e859d27a61f5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.567270] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c92de98-4dbb-4887-abe4-d3b963cc9868 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.582737] env[62627]: DEBUG nova.compute.provider_tree [None req-1d35545e-d84c-41b3-acf8-65a4c5576943 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 961.688591] env[62627]: DEBUG oslo_concurrency.lockutils [None req-14d7e9fc-3bd6-4888-8aa6-805b80b62089 tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 961.729449] env[62627]: DEBUG oslo_vmware.api [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52909274-a245-a8f4-5b39-68099122385c, 'name': SearchDatastore_Task, 'duration_secs': 0.012267} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.729902] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 961.730953] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 84b8381c-bdd4-462e-aa98-0cc972f00882/84b8381c-bdd4-462e-aa98-0cc972f00882.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 961.731097] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e4b30845-1d6a-4497-bd17-23dc9bb0e1a9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.738834] env[62627]: DEBUG oslo_vmware.api [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Waiting for the task: (returnval){ [ 961.738834] env[62627]: value = "task-2194651" [ 961.738834] env[62627]: _type = "Task" [ 961.738834] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.747802] env[62627]: DEBUG oslo_vmware.api [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2194651, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.809867] env[62627]: DEBUG nova.compute.manager [req-852a41d6-4e00-4a1a-857a-635412bfa0b4 req-aaae8a7b-2bd0-4474-9d66-2d59feb2c337 service nova] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Received event network-vif-deleted-abd8176d-4f10-4dcc-9b49-f53a48ec4cc4 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 961.810080] env[62627]: INFO nova.compute.manager [req-852a41d6-4e00-4a1a-857a-635412bfa0b4 req-aaae8a7b-2bd0-4474-9d66-2d59feb2c337 service nova] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Neutron deleted interface abd8176d-4f10-4dcc-9b49-f53a48ec4cc4; detaching it from the instance and deleting it from the info cache [ 961.810258] env[62627]: DEBUG nova.network.neutron [req-852a41d6-4e00-4a1a-857a-635412bfa0b4 req-aaae8a7b-2bd0-4474-9d66-2d59feb2c337 service nova] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 962.085626] env[62627]: DEBUG nova.scheduler.client.report [None req-1d35545e-d84c-41b3-acf8-65a4c5576943 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 962.249358] env[62627]: DEBUG oslo_vmware.api [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2194651, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.286902] env[62627]: DEBUG nova.network.neutron [-] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 962.315291] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ce7fa749-1675-4f9d-9c6e-32a118bf1cc5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.324291] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b17b19a6-9a65-4cff-84ee-c4cb301eab60 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.365889] env[62627]: DEBUG nova.compute.manager [req-852a41d6-4e00-4a1a-857a-635412bfa0b4 req-aaae8a7b-2bd0-4474-9d66-2d59feb2c337 service nova] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Detach interface failed, port_id=abd8176d-4f10-4dcc-9b49-f53a48ec4cc4, reason: Instance cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a could not be found. 
{{(pid=62627) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 962.592261] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1d35545e-d84c-41b3-acf8-65a4c5576943 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.133s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 962.596635] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.650s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 962.597670] env[62627]: INFO nova.compute.claims [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: f6357f01-d7cb-4477-be2d-98fc17d7a940] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 962.632881] env[62627]: INFO nova.scheduler.client.report [None req-1d35545e-d84c-41b3-acf8-65a4c5576943 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Deleted allocations for instance e76bdb57-4a30-4222-ad4c-d95e4e6193cb [ 962.750927] env[62627]: DEBUG oslo_vmware.api [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2194651, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.89158} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.751269] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 84b8381c-bdd4-462e-aa98-0cc972f00882/84b8381c-bdd4-462e-aa98-0cc972f00882.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 962.751571] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 962.751753] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e5524ad2-3df0-44f2-bf6c-6453cdb7daf3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.759288] env[62627]: DEBUG oslo_vmware.api [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Waiting for the task: (returnval){ [ 962.759288] env[62627]: value = "task-2194652" [ 962.759288] env[62627]: _type = "Task" [ 962.759288] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.768845] env[62627]: DEBUG oslo_vmware.api [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2194652, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.790477] env[62627]: INFO nova.compute.manager [-] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Took 1.41 seconds to deallocate network for instance. [ 963.140360] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1d35545e-d84c-41b3-acf8-65a4c5576943 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Lock "e76bdb57-4a30-4222-ad4c-d95e4e6193cb" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 39.581s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 963.268477] env[62627]: DEBUG oslo_vmware.api [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2194652, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.128395} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.268748] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 963.269527] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e4afd60-d52e-48ee-80a5-7829fd114242 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.292711] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Reconfiguring VM instance instance-0000003d to attach disk [datastore2] 84b8381c-bdd4-462e-aa98-0cc972f00882/84b8381c-bdd4-462e-aa98-0cc972f00882.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 963.292965] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-29c09816-19a3-4ee7-af51-c9e15cbc788e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.309692] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9f5bcb25-ae27-4738-a9e0-f350bba5a060 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 963.315395] env[62627]: DEBUG oslo_vmware.api [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Waiting for the task: 
(returnval){ [ 963.315395] env[62627]: value = "task-2194653" [ 963.315395] env[62627]: _type = "Task" [ 963.315395] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.323421] env[62627]: DEBUG oslo_vmware.api [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2194653, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.828869] env[62627]: DEBUG oslo_vmware.api [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2194653, 'name': ReconfigVM_Task, 'duration_secs': 0.300389} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.831461] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Reconfigured VM instance instance-0000003d to attach disk [datastore2] 84b8381c-bdd4-462e-aa98-0cc972f00882/84b8381c-bdd4-462e-aa98-0cc972f00882.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 963.832919] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6948bb9e-d8bd-4ae0-b88b-f78c06b3107e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.845747] env[62627]: DEBUG oslo_vmware.api [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Waiting for the task: (returnval){ [ 963.845747] env[62627]: value = "task-2194654" [ 963.845747] env[62627]: _type = "Task" [ 963.845747] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.854661] env[62627]: DEBUG oslo_vmware.api [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2194654, 'name': Rename_Task} progress is 5%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.088243] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3da55acb-239f-42e0-becc-cca7a79d61d1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.096595] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35d3e5ce-4b9c-4def-bea8-e37faf96ef44 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.126318] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-252adfb9-fc12-451c-a1b8-364cdd770b38 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.134068] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-343e6239-8bd2-4d0f-88be-db967586d865 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.148525] env[62627]: DEBUG nova.compute.provider_tree [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 964.356757] env[62627]: DEBUG oslo_vmware.api [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2194654, 'name': Rename_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.652456] env[62627]: DEBUG nova.scheduler.client.report [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 964.867572] env[62627]: DEBUG oslo_vmware.api [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2194654, 'name': Rename_Task, 'duration_secs': 0.820619} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.867948] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 964.868274] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-74f96680-786e-46ff-b1a3-752edc90189d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.875045] env[62627]: DEBUG oslo_vmware.api [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Waiting for the task: (returnval){ [ 964.875045] env[62627]: value = "task-2194655" [ 964.875045] env[62627]: _type = "Task" [ 964.875045] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.886734] env[62627]: DEBUG oslo_vmware.api [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2194655, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.158458] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.563s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 965.159405] env[62627]: DEBUG nova.compute.manager [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: f6357f01-d7cb-4477-be2d-98fc17d7a940] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 965.161749] env[62627]: DEBUG oslo_concurrency.lockutils [None req-896b61e2-43d3-4c8e-b0d1-2bea7e25f400 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.854s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 965.161963] env[62627]: DEBUG nova.objects.instance [None req-896b61e2-43d3-4c8e-b0d1-2bea7e25f400 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Lazy-loading 'resources' on Instance uuid 7804914c-7df8-45c2-a934-67aa73afb2e2 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 965.386592] env[62627]: DEBUG oslo_vmware.api [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2194655, 'name': PowerOnVM_Task, 'duration_secs': 0.458076} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.386879] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 965.387097] env[62627]: INFO nova.compute.manager [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Took 9.38 seconds to spawn the instance on the hypervisor. [ 965.387377] env[62627]: DEBUG nova.compute.manager [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 965.388171] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e69fbfa6-6893-4e9b-9c26-198c0750e6a6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.667272] env[62627]: DEBUG nova.compute.utils [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 965.670373] env[62627]: DEBUG nova.compute.manager [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: f6357f01-d7cb-4477-be2d-98fc17d7a940] Allocating IP information in the background. 
{{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 965.675192] env[62627]: DEBUG nova.network.neutron [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: f6357f01-d7cb-4477-be2d-98fc17d7a940] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 965.738240] env[62627]: DEBUG nova.policy [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2232b2002014411a9183e2139700ea98', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '711f8ab1a2d5466399f408870e412587', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 965.874176] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Acquiring lock "9adb2baf-315f-4122-a657-cb99757002ba" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 965.874499] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Lock "9adb2baf-315f-4122-a657-cb99757002ba" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 965.910346] env[62627]: INFO nova.compute.manager [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Took 45.90 seconds to build instance. 
[ 966.128095] env[62627]: DEBUG nova.network.neutron [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: f6357f01-d7cb-4477-be2d-98fc17d7a940] Successfully created port: 43ceb60f-795b-49ef-b06d-892b278ac067 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 966.141403] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85b0518c-60f2-41cf-aeda-3201166eff11 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.149238] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd06aa4c-dd3f-40b3-b426-3919cccef067 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.179501] env[62627]: DEBUG nova.compute.manager [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: f6357f01-d7cb-4477-be2d-98fc17d7a940] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 966.183550] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e79b8f23-7371-4fd8-8e2a-675cc7d1488c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.191344] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2c13987-fd50-4bb3-9da7-09f5d8e4a999 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.208774] env[62627]: DEBUG nova.compute.provider_tree [None req-896b61e2-43d3-4c8e-b0d1-2bea7e25f400 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 966.377533] env[62627]: DEBUG nova.compute.manager [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] [instance: 9adb2baf-315f-4122-a657-cb99757002ba] Starting instance... 
{{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 966.412551] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cac29609-8498-41d9-8f38-b47ef29db5d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lock "84b8381c-bdd4-462e-aa98-0cc972f00882" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 47.413s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 966.710982] env[62627]: DEBUG nova.scheduler.client.report [None req-896b61e2-43d3-4c8e-b0d1-2bea7e25f400 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 966.896574] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 967.178823] env[62627]: DEBUG nova.compute.manager [req-cbf6853f-b885-4970-a999-2d979960d924 req-d9b938ae-1b4d-4c42-9d60-5574e750ec0c service nova] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Received event network-changed-4c3f3405-39a4-4c22-9c26-bdd0ab342c29 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 967.178823] env[62627]: DEBUG nova.compute.manager [req-cbf6853f-b885-4970-a999-2d979960d924 req-d9b938ae-1b4d-4c42-9d60-5574e750ec0c service nova] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Refreshing instance network info cache due to event network-changed-4c3f3405-39a4-4c22-9c26-bdd0ab342c29. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 967.180741] env[62627]: DEBUG oslo_concurrency.lockutils [req-cbf6853f-b885-4970-a999-2d979960d924 req-d9b938ae-1b4d-4c42-9d60-5574e750ec0c service nova] Acquiring lock "refresh_cache-84b8381c-bdd4-462e-aa98-0cc972f00882" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 967.180741] env[62627]: DEBUG oslo_concurrency.lockutils [req-cbf6853f-b885-4970-a999-2d979960d924 req-d9b938ae-1b4d-4c42-9d60-5574e750ec0c service nova] Acquired lock "refresh_cache-84b8381c-bdd4-462e-aa98-0cc972f00882" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 967.180741] env[62627]: DEBUG nova.network.neutron [req-cbf6853f-b885-4970-a999-2d979960d924 req-d9b938ae-1b4d-4c42-9d60-5574e750ec0c service nova] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Refreshing network info cache for port 4c3f3405-39a4-4c22-9c26-bdd0ab342c29 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 967.192787] env[62627]: DEBUG nova.compute.manager [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: f6357f01-d7cb-4477-be2d-98fc17d7a940] Start spawning the instance on the hypervisor. {{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 967.215768] env[62627]: DEBUG oslo_concurrency.lockutils [None req-896b61e2-43d3-4c8e-b0d1-2bea7e25f400 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.054s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 967.217829] env[62627]: DEBUG oslo_concurrency.lockutils [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.876s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 967.219914] env[62627]: INFO nova.compute.claims [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 967.224467] env[62627]: DEBUG nova.virt.hardware [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:34:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='914075967',id=40,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-837200530',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 967.224694] env[62627]: DEBUG nova.virt.hardware [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 967.224871] env[62627]: DEBUG nova.virt.hardware [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 967.225116] env[62627]: DEBUG nova.virt.hardware [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 967.225272] env[62627]: DEBUG nova.virt.hardware [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 967.225417] env[62627]: DEBUG nova.virt.hardware [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 967.225616] env[62627]: DEBUG nova.virt.hardware [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 967.225771] env[62627]: DEBUG nova.virt.hardware [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 967.225938] env[62627]: DEBUG nova.virt.hardware [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 967.226121] env[62627]: DEBUG nova.virt.hardware [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 
tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 967.226293] env[62627]: DEBUG nova.virt.hardware [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 967.227360] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dbd284f-eb91-4cec-bb2c-a909e63efd3a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.235901] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8492f70d-401e-41c1-9e3b-a255ef8f4241 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.241141] env[62627]: INFO nova.scheduler.client.report [None req-896b61e2-43d3-4c8e-b0d1-2bea7e25f400 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Deleted allocations for instance 7804914c-7df8-45c2-a934-67aa73afb2e2 [ 967.758186] env[62627]: DEBUG oslo_concurrency.lockutils [None req-896b61e2-43d3-4c8e-b0d1-2bea7e25f400 tempest-ServerShowV257Test-2139404517 tempest-ServerShowV257Test-2139404517-project-member] Lock "7804914c-7df8-45c2-a934-67aa73afb2e2" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 42.993s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 967.846172] env[62627]: DEBUG nova.compute.manager [req-75d0cf09-43df-4217-af5e-a81e381997ff req-b87ce216-cf20-4874-ab21-da15eb58e19a service nova] [instance: f6357f01-d7cb-4477-be2d-98fc17d7a940] Received event network-vif-plugged-43ceb60f-795b-49ef-b06d-892b278ac067 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 967.846172] env[62627]: DEBUG oslo_concurrency.lockutils [req-75d0cf09-43df-4217-af5e-a81e381997ff req-b87ce216-cf20-4874-ab21-da15eb58e19a service nova] Acquiring lock "f6357f01-d7cb-4477-be2d-98fc17d7a940-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 967.850223] env[62627]: DEBUG oslo_concurrency.lockutils [req-75d0cf09-43df-4217-af5e-a81e381997ff req-b87ce216-cf20-4874-ab21-da15eb58e19a service nova] Lock "f6357f01-d7cb-4477-be2d-98fc17d7a940-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 967.850223] env[62627]: DEBUG oslo_concurrency.lockutils [req-75d0cf09-43df-4217-af5e-a81e381997ff req-b87ce216-cf20-4874-ab21-da15eb58e19a service nova] Lock "f6357f01-d7cb-4477-be2d-98fc17d7a940-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.002s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 967.850223] env[62627]: DEBUG nova.compute.manager [req-75d0cf09-43df-4217-af5e-a81e381997ff 
req-b87ce216-cf20-4874-ab21-da15eb58e19a service nova] [instance: f6357f01-d7cb-4477-be2d-98fc17d7a940] No waiting events found dispatching network-vif-plugged-43ceb60f-795b-49ef-b06d-892b278ac067 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 967.850223] env[62627]: WARNING nova.compute.manager [req-75d0cf09-43df-4217-af5e-a81e381997ff req-b87ce216-cf20-4874-ab21-da15eb58e19a service nova] [instance: f6357f01-d7cb-4477-be2d-98fc17d7a940] Received unexpected event network-vif-plugged-43ceb60f-795b-49ef-b06d-892b278ac067 for instance with vm_state building and task_state spawning. [ 967.978048] env[62627]: DEBUG nova.network.neutron [req-cbf6853f-b885-4970-a999-2d979960d924 req-d9b938ae-1b4d-4c42-9d60-5574e750ec0c service nova] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Updated VIF entry in instance network info cache for port 4c3f3405-39a4-4c22-9c26-bdd0ab342c29. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 967.978494] env[62627]: DEBUG nova.network.neutron [req-cbf6853f-b885-4970-a999-2d979960d924 req-d9b938ae-1b4d-4c42-9d60-5574e750ec0c service nova] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Updating instance_info_cache with network_info: [{"id": "4c3f3405-39a4-4c22-9c26-bdd0ab342c29", "address": "fa:16:3e:05:76:ef", "network": {"id": "a1c5c227-8f1f-4881-a6d8-f446b64d9846", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1052922661-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.243", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e4f7ad3ebcf4c4d97bdbce58eec5a09", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "023d6500-887e-4dc4-bec5-06b40450d9c0", "external-id": "nsx-vlan-transportzone-108", "segmentation_id": 108, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c3f3405-39", "ovs_interfaceid": "4c3f3405-39a4-4c22-9c26-bdd0ab342c29", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 968.025153] env[62627]: DEBUG nova.network.neutron [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: f6357f01-d7cb-4477-be2d-98fc17d7a940] Successfully updated port: 43ceb60f-795b-49ef-b06d-892b278ac067 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 968.481623] env[62627]: DEBUG oslo_concurrency.lockutils [req-cbf6853f-b885-4970-a999-2d979960d924 req-d9b938ae-1b4d-4c42-9d60-5574e750ec0c service nova] Releasing lock "refresh_cache-84b8381c-bdd4-462e-aa98-0cc972f00882" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 968.527747] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 
tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Acquiring lock "refresh_cache-f6357f01-d7cb-4477-be2d-98fc17d7a940" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 968.527935] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Acquired lock "refresh_cache-f6357f01-d7cb-4477-be2d-98fc17d7a940" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 968.528799] env[62627]: DEBUG nova.network.neutron [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: f6357f01-d7cb-4477-be2d-98fc17d7a940] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 968.603543] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73615c09-9da4-4024-b247-d85043bc84c0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.612054] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da1f3a8a-9121-492a-8a7b-48cbae5e794a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.641300] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9624f265-1e40-45a4-9948-0eaeed728ae1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.649093] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cec81c2-913d-46ac-8f27-8ff7980ea8be {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.664623] env[62627]: DEBUG nova.compute.provider_tree [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 969.079321] env[62627]: DEBUG nova.network.neutron [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: f6357f01-d7cb-4477-be2d-98fc17d7a940] Instance cache missing network info. 
{{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 969.167962] env[62627]: DEBUG nova.scheduler.client.report [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 969.226972] env[62627]: DEBUG nova.network.neutron [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: f6357f01-d7cb-4477-be2d-98fc17d7a940] Updating instance_info_cache with network_info: [{"id": "43ceb60f-795b-49ef-b06d-892b278ac067", "address": "fa:16:3e:4a:6c:ac", "network": {"id": "e4275dc3-df4b-49c1-9677-3c0fc9408d2e", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1014582117-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "711f8ab1a2d5466399f408870e412587", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e83154-c0d2-4d3d-b95e-3cd5ba336257", "external-id": "nsx-vlan-transportzone-771", "segmentation_id": 771, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43ceb60f-79", "ovs_interfaceid": "43ceb60f-795b-49ef-b06d-892b278ac067", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 969.673025] env[62627]: DEBUG oslo_concurrency.lockutils [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.455s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 969.673583] env[62627]: DEBUG nova.compute.manager [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] Start building networks asynchronously for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 969.676248] env[62627]: DEBUG oslo_concurrency.lockutils [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 35.385s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 969.676416] env[62627]: DEBUG nova.objects.instance [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62627) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 969.730871] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Releasing lock "refresh_cache-f6357f01-d7cb-4477-be2d-98fc17d7a940" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 969.731205] env[62627]: DEBUG nova.compute.manager [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: f6357f01-d7cb-4477-be2d-98fc17d7a940] Instance network_info: |[{"id": "43ceb60f-795b-49ef-b06d-892b278ac067", "address": "fa:16:3e:4a:6c:ac", "network": {"id": "e4275dc3-df4b-49c1-9677-3c0fc9408d2e", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1014582117-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "711f8ab1a2d5466399f408870e412587", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e83154-c0d2-4d3d-b95e-3cd5ba336257", "external-id": "nsx-vlan-transportzone-771", "segmentation_id": 771, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43ceb60f-79", "ovs_interfaceid": "43ceb60f-795b-49ef-b06d-892b278ac067", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 969.731595] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: f6357f01-d7cb-4477-be2d-98fc17d7a940] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4a:6c:ac', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '13e83154-c0d2-4d3d-b95e-3cd5ba336257', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '43ceb60f-795b-49ef-b06d-892b278ac067', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 969.739042] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Creating folder: Project (711f8ab1a2d5466399f408870e412587). Parent ref: group-v447541. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 969.739302] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bbf006e9-211f-449b-a2ad-cbf3dab1a9d6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.749116] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Created folder: Project (711f8ab1a2d5466399f408870e412587) in parent group-v447541. [ 969.749298] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Creating folder: Instances. Parent ref: group-v447712. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 969.749515] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8239855f-5866-4e94-9aec-0b55e8106d1c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.759085] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Created folder: Instances in parent group-v447712. [ 969.759085] env[62627]: DEBUG oslo.service.loopingcall [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 969.759085] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f6357f01-d7cb-4477-be2d-98fc17d7a940] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 969.759085] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-05e28acd-f743-4f69-aa5d-45942cd444b5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.778181] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 969.778181] env[62627]: value = "task-2194658" [ 969.778181] env[62627]: _type = "Task" [ 969.778181] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.788632] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194658, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.870748] env[62627]: DEBUG nova.compute.manager [req-d65a8a35-4d80-4a22-ad33-eb293242aeaf req-f16b1328-040c-4eff-bdf8-f203c2f569e5 service nova] [instance: f6357f01-d7cb-4477-be2d-98fc17d7a940] Received event network-changed-43ceb60f-795b-49ef-b06d-892b278ac067 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 969.870913] env[62627]: DEBUG nova.compute.manager [req-d65a8a35-4d80-4a22-ad33-eb293242aeaf req-f16b1328-040c-4eff-bdf8-f203c2f569e5 service nova] [instance: f6357f01-d7cb-4477-be2d-98fc17d7a940] Refreshing instance network info cache due to event network-changed-43ceb60f-795b-49ef-b06d-892b278ac067. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 969.871144] env[62627]: DEBUG oslo_concurrency.lockutils [req-d65a8a35-4d80-4a22-ad33-eb293242aeaf req-f16b1328-040c-4eff-bdf8-f203c2f569e5 service nova] Acquiring lock "refresh_cache-f6357f01-d7cb-4477-be2d-98fc17d7a940" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 969.871290] env[62627]: DEBUG oslo_concurrency.lockutils [req-d65a8a35-4d80-4a22-ad33-eb293242aeaf req-f16b1328-040c-4eff-bdf8-f203c2f569e5 service nova] Acquired lock "refresh_cache-f6357f01-d7cb-4477-be2d-98fc17d7a940" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 969.871501] env[62627]: DEBUG nova.network.neutron [req-d65a8a35-4d80-4a22-ad33-eb293242aeaf req-f16b1328-040c-4eff-bdf8-f203c2f569e5 service nova] [instance: f6357f01-d7cb-4477-be2d-98fc17d7a940] Refreshing network info cache for port 43ceb60f-795b-49ef-b06d-892b278ac067 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 970.184071] env[62627]: DEBUG nova.compute.utils [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 970.185343] env[62627]: DEBUG nova.compute.manager [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] Allocating IP information in the background. 
{{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 970.185513] env[62627]: DEBUG nova.network.neutron [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 970.226406] env[62627]: DEBUG nova.policy [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8d6a2c615f034c49836031736531061c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '01b0f3fe63f64e1183acc0fec83bb3f5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 970.288016] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194658, 'name': CreateVM_Task, 'duration_secs': 0.307156} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.288267] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f6357f01-d7cb-4477-be2d-98fc17d7a940] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 970.289045] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 970.289221] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 970.289541] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 970.289790] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f701aeb-a068-48e6-9c28-b14c0fba80bf {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.294342] env[62627]: DEBUG oslo_vmware.api [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Waiting for the task: (returnval){ [ 970.294342] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52a00042-24c2-69b0-f159-02dfe51efc67" [ 
970.294342] env[62627]: _type = "Task" [ 970.294342] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.302127] env[62627]: DEBUG oslo_vmware.api [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52a00042-24c2-69b0-f159-02dfe51efc67, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.507096] env[62627]: DEBUG nova.network.neutron [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] Successfully created port: fcb37279-335e-48f7-8d54-32ecee8fa761 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 970.571075] env[62627]: DEBUG nova.network.neutron [req-d65a8a35-4d80-4a22-ad33-eb293242aeaf req-f16b1328-040c-4eff-bdf8-f203c2f569e5 service nova] [instance: f6357f01-d7cb-4477-be2d-98fc17d7a940] Updated VIF entry in instance network info cache for port 43ceb60f-795b-49ef-b06d-892b278ac067. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 970.571556] env[62627]: DEBUG nova.network.neutron [req-d65a8a35-4d80-4a22-ad33-eb293242aeaf req-f16b1328-040c-4eff-bdf8-f203c2f569e5 service nova] [instance: f6357f01-d7cb-4477-be2d-98fc17d7a940] Updating instance_info_cache with network_info: [{"id": "43ceb60f-795b-49ef-b06d-892b278ac067", "address": "fa:16:3e:4a:6c:ac", "network": {"id": "e4275dc3-df4b-49c1-9677-3c0fc9408d2e", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1014582117-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "711f8ab1a2d5466399f408870e412587", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e83154-c0d2-4d3d-b95e-3cd5ba336257", "external-id": "nsx-vlan-transportzone-771", "segmentation_id": 771, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43ceb60f-79", "ovs_interfaceid": "43ceb60f-795b-49ef-b06d-892b278ac067", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 970.694026] env[62627]: DEBUG oslo_concurrency.lockutils [None req-707b0bce-e894-4d20-a873-8f4e0993309c tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.017s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 970.695016] env[62627]: DEBUG nova.compute.manager [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 
e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 970.697676] env[62627]: DEBUG oslo_concurrency.lockutils [None req-048bd3cb-f9ad-4be1-bab6-addfc949e805 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.251s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 970.697893] env[62627]: DEBUG nova.objects.instance [None req-048bd3cb-f9ad-4be1-bab6-addfc949e805 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lazy-loading 'resources' on Instance uuid d0786233-087b-4000-8c37-fd5c55cc0ef4 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 970.804155] env[62627]: DEBUG oslo_vmware.api [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52a00042-24c2-69b0-f159-02dfe51efc67, 'name': SearchDatastore_Task, 'duration_secs': 0.010082} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.804458] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 970.804695] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: f6357f01-d7cb-4477-be2d-98fc17d7a940] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 970.805013] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 970.805195] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 970.805378] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 970.805634] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-24337f53-d3be-4281-9f2b-4f660bbb218b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.817942] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 970.818134] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 970.818822] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b78d2e76-f764-4465-a971-3d8ccb7ea889 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.823624] env[62627]: DEBUG oslo_vmware.api [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Waiting for the task: (returnval){ [ 970.823624] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52ea6deb-6983-b8d6-2020-56cabfa5a5d5" [ 970.823624] env[62627]: _type = "Task" [ 970.823624] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.831149] env[62627]: DEBUG oslo_vmware.api [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52ea6deb-6983-b8d6-2020-56cabfa5a5d5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.075645] env[62627]: DEBUG oslo_concurrency.lockutils [req-d65a8a35-4d80-4a22-ad33-eb293242aeaf req-f16b1328-040c-4eff-bdf8-f203c2f569e5 service nova] Releasing lock "refresh_cache-f6357f01-d7cb-4477-be2d-98fc17d7a940" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 971.335663] env[62627]: DEBUG oslo_vmware.api [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52ea6deb-6983-b8d6-2020-56cabfa5a5d5, 'name': SearchDatastore_Task, 'duration_secs': 0.009354} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.339026] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea87c08b-4895-476e-a28d-a5dfdd4b2202 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.343651] env[62627]: DEBUG oslo_vmware.api [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Waiting for the task: (returnval){ [ 971.343651] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52ff7ffc-85b8-740c-61d2-0c26fdb22ef8" [ 971.343651] env[62627]: _type = "Task" [ 971.343651] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.353212] env[62627]: DEBUG oslo_vmware.api [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52ff7ffc-85b8-740c-61d2-0c26fdb22ef8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.527436] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3eb1969c-1482-4095-82c7-f0d897ce10b2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.534842] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77566643-f7c8-4413-b107-4fd6be739865 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.563428] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-550e376c-556e-4fc4-9092-fa3e14d3dc9c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.570359] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1f31103-561c-45e2-8ab3-5294fe5a3ed4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.584627] env[62627]: DEBUG nova.compute.provider_tree [None req-048bd3cb-f9ad-4be1-bab6-addfc949e805 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 971.707373] env[62627]: DEBUG nova.compute.manager [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] Start spawning the instance on the hypervisor. 
{{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 971.733356] env[62627]: DEBUG nova.virt.hardware [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 971.733602] env[62627]: DEBUG nova.virt.hardware [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 971.733754] env[62627]: DEBUG nova.virt.hardware [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 971.733930] env[62627]: DEBUG nova.virt.hardware [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 971.734088] env[62627]: DEBUG nova.virt.hardware [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 971.734275] env[62627]: DEBUG nova.virt.hardware [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 971.734504] env[62627]: DEBUG nova.virt.hardware [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 971.734664] env[62627]: DEBUG nova.virt.hardware [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 971.734831] env[62627]: DEBUG nova.virt.hardware [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Got 1 possible 
topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 971.735033] env[62627]: DEBUG nova.virt.hardware [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 971.735587] env[62627]: DEBUG nova.virt.hardware [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 971.736065] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef2be0a7-810e-41ab-af3c-93253d67a21c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.744307] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90bdad08-597c-4d64-a324-398d452ab1be {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.853662] env[62627]: DEBUG oslo_vmware.api [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52ff7ffc-85b8-740c-61d2-0c26fdb22ef8, 'name': SearchDatastore_Task, 'duration_secs': 0.008683} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.853792] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 971.854498] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] f6357f01-d7cb-4477-be2d-98fc17d7a940/f6357f01-d7cb-4477-be2d-98fc17d7a940.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 971.854498] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7b8f4d6d-6e0c-433e-a894-0a0caf9cb6d6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.861578] env[62627]: DEBUG oslo_vmware.api [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Waiting for the task: (returnval){ [ 971.861578] env[62627]: value = "task-2194659" [ 971.861578] env[62627]: _type = "Task" [ 971.861578] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.868715] env[62627]: DEBUG oslo_vmware.api [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Task: {'id': task-2194659, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.989339] env[62627]: DEBUG nova.compute.manager [req-67992e3a-96e2-4dc9-959c-417a12864f63 req-174f77b8-f2ae-4605-8b30-dc26537bb596 service nova] [instance: e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] Received event network-vif-plugged-fcb37279-335e-48f7-8d54-32ecee8fa761 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 971.989569] env[62627]: DEBUG oslo_concurrency.lockutils [req-67992e3a-96e2-4dc9-959c-417a12864f63 req-174f77b8-f2ae-4605-8b30-dc26537bb596 service nova] Acquiring lock "e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 971.989765] env[62627]: DEBUG oslo_concurrency.lockutils [req-67992e3a-96e2-4dc9-959c-417a12864f63 req-174f77b8-f2ae-4605-8b30-dc26537bb596 service nova] Lock "e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 971.989935] env[62627]: DEBUG oslo_concurrency.lockutils [req-67992e3a-96e2-4dc9-959c-417a12864f63 req-174f77b8-f2ae-4605-8b30-dc26537bb596 service nova] Lock "e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 971.990151] env[62627]: DEBUG nova.compute.manager [req-67992e3a-96e2-4dc9-959c-417a12864f63 req-174f77b8-f2ae-4605-8b30-dc26537bb596 service nova] [instance: e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] No waiting events found dispatching network-vif-plugged-fcb37279-335e-48f7-8d54-32ecee8fa761 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 971.990355] env[62627]: WARNING nova.compute.manager [req-67992e3a-96e2-4dc9-959c-417a12864f63 req-174f77b8-f2ae-4605-8b30-dc26537bb596 service nova] [instance: e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] Received unexpected event network-vif-plugged-fcb37279-335e-48f7-8d54-32ecee8fa761 for instance with vm_state building and task_state spawning. 
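Annotation: the spawn activity above is driven by long-running vCenter tasks (CreateVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task). Each SOAP request returns a task handle that the driver then polls until completion, which is why the log shows "Waiting for the task", intermediate "progress is 0%" entries, and a final "completed successfully" record carrying a duration_secs value. The sketch below is not the oslo.vmware implementation; it is a minimal, self-contained illustration of that poll-until-done pattern, and every name in it (FakeTask, wait_for_task, refresh) is invented for the example.

import time
from dataclasses import dataclass


@dataclass
class FakeTask:
    """Hypothetical stand-in for a vCenter task; the real driver re-reads the
    task's 'info' property from the SOAP API on every poll."""
    name: str
    _ticks: int = 3          # polls remaining before the simulated task finishes
    progress: int = 0
    state: str = "running"

    def refresh(self) -> None:
        # Simulate the task advancing a little between polls.
        if self._ticks > 0:
            self._ticks -= 1
            self.progress = min(100, self.progress + 33)
        if self._ticks == 0:
            self.state, self.progress = "success", 100


def wait_for_task(task: FakeTask, interval: float = 0.5) -> float:
    """Poll a task until it finishes, printing progress like the log entries above.

    Returns the elapsed time, mirroring the duration_secs field recorded when a
    task completes successfully.
    """
    start = time.monotonic()
    while True:
        task.refresh()
        if task.state == "success":
            duration = time.monotonic() - start
            print(f"Task {task.name} completed successfully "
                  f"(duration_secs: {duration:.3f})")
            return duration
        if task.state == "error":
            raise RuntimeError(f"Task {task.name} failed")
        print(f"Task {task.name} progress is {task.progress}%")
        time.sleep(interval)


if __name__ == "__main__":
    wait_for_task(FakeTask("CreateVM_Task"), interval=0.1)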
[ 972.083828] env[62627]: DEBUG nova.network.neutron [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] Successfully updated port: fcb37279-335e-48f7-8d54-32ecee8fa761 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 972.089111] env[62627]: DEBUG nova.scheduler.client.report [None req-048bd3cb-f9ad-4be1-bab6-addfc949e805 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 972.375287] env[62627]: DEBUG oslo_vmware.api [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Task: {'id': task-2194659, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.463391} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.375287] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] f6357f01-d7cb-4477-be2d-98fc17d7a940/f6357f01-d7cb-4477-be2d-98fc17d7a940.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 972.375287] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: f6357f01-d7cb-4477-be2d-98fc17d7a940] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 972.375287] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a2fd88f3-5433-46f4-8094-cd07bf2d1edc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.382021] env[62627]: DEBUG oslo_vmware.api [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Waiting for the task: (returnval){ [ 972.382021] env[62627]: value = "task-2194660" [ 972.382021] env[62627]: _type = "Task" [ 972.382021] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.389186] env[62627]: DEBUG oslo_vmware.api [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Task: {'id': task-2194660, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.586997] env[62627]: DEBUG oslo_concurrency.lockutils [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Acquiring lock "refresh_cache-e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 972.587239] env[62627]: DEBUG oslo_concurrency.lockutils [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Acquired lock "refresh_cache-e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 972.587300] env[62627]: DEBUG nova.network.neutron [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 972.595749] env[62627]: DEBUG oslo_concurrency.lockutils [None req-048bd3cb-f9ad-4be1-bab6-addfc949e805 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.898s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 972.598642] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 36.828s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 972.598821] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 972.599015] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62627) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 972.599315] env[62627]: DEBUG oslo_concurrency.lockutils [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.805s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 972.600826] env[62627]: INFO nova.compute.claims [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 0de040e4-8896-4f02-9284-f68100254631] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 972.603921] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-b7a41526-af4a-4df8-aa82-b0cf1452ad9f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.612354] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03bce4b8-8157-410b-8eee-a5caa798dacd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.616696] env[62627]: INFO nova.scheduler.client.report [None req-048bd3cb-f9ad-4be1-bab6-addfc949e805 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Deleted allocations for instance d0786233-087b-4000-8c37-fd5c55cc0ef4 [ 972.631418] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0099c4a0-70a8-49a5-ba16-ef2eef42a5fc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.638103] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e1ff86f-1030-4e9c-a5e6-390f18854989 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.667724] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178572MB free_disk=176GB free_vcpus=48 pci_devices=None {{(pid=62627) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 972.667901] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 972.890426] env[62627]: DEBUG oslo_vmware.api [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Task: {'id': task-2194660, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061577} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.890782] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: f6357f01-d7cb-4477-be2d-98fc17d7a940] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 972.891688] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d410d769-cea3-4352-9e99-ec56910af6ff {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.914263] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: f6357f01-d7cb-4477-be2d-98fc17d7a940] Reconfiguring VM instance instance-0000003e to attach disk [datastore2] f6357f01-d7cb-4477-be2d-98fc17d7a940/f6357f01-d7cb-4477-be2d-98fc17d7a940.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 972.914582] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-80c7b47f-cdc2-41d8-b134-c9c38a72607a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.938822] env[62627]: DEBUG oslo_vmware.api [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Waiting for the task: (returnval){ [ 972.938822] env[62627]: value = "task-2194661" [ 972.938822] env[62627]: _type = "Task" [ 972.938822] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.947629] env[62627]: DEBUG oslo_vmware.api [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Task: {'id': task-2194661, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.124040] env[62627]: DEBUG oslo_concurrency.lockutils [None req-048bd3cb-f9ad-4be1-bab6-addfc949e805 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "d0786233-087b-4000-8c37-fd5c55cc0ef4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.320s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 973.136198] env[62627]: DEBUG nova.network.neutron [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] Instance cache missing network info. 
{{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 973.344537] env[62627]: DEBUG nova.network.neutron [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] Updating instance_info_cache with network_info: [{"id": "fcb37279-335e-48f7-8d54-32ecee8fa761", "address": "fa:16:3e:39:b9:68", "network": {"id": "cc0a3a8a-1222-4362-a164-11bcef0a8bc1", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1517256335-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "01b0f3fe63f64e1183acc0fec83bb3f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52f465cb-7418-4172-bd7d-aec00abeb692", "external-id": "nsx-vlan-transportzone-895", "segmentation_id": 895, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfcb37279-33", "ovs_interfaceid": "fcb37279-335e-48f7-8d54-32ecee8fa761", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 973.448963] env[62627]: DEBUG oslo_vmware.api [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Task: {'id': task-2194661, 'name': ReconfigVM_Task, 'duration_secs': 0.298639} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.449275] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: f6357f01-d7cb-4477-be2d-98fc17d7a940] Reconfigured VM instance instance-0000003e to attach disk [datastore2] f6357f01-d7cb-4477-be2d-98fc17d7a940/f6357f01-d7cb-4477-be2d-98fc17d7a940.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 973.449880] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0697ae5b-bd09-456c-9aa4-fdfee60d38f3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.457838] env[62627]: DEBUG oslo_vmware.api [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Waiting for the task: (returnval){ [ 973.457838] env[62627]: value = "task-2194662" [ 973.457838] env[62627]: _type = "Task" [ 973.457838] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.466008] env[62627]: DEBUG oslo_vmware.api [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Task: {'id': task-2194662, 'name': Rename_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.847028] env[62627]: DEBUG oslo_concurrency.lockutils [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Releasing lock "refresh_cache-e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 973.847270] env[62627]: DEBUG nova.compute.manager [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] Instance network_info: |[{"id": "fcb37279-335e-48f7-8d54-32ecee8fa761", "address": "fa:16:3e:39:b9:68", "network": {"id": "cc0a3a8a-1222-4362-a164-11bcef0a8bc1", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1517256335-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "01b0f3fe63f64e1183acc0fec83bb3f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52f465cb-7418-4172-bd7d-aec00abeb692", "external-id": "nsx-vlan-transportzone-895", "segmentation_id": 895, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfcb37279-33", "ovs_interfaceid": "fcb37279-335e-48f7-8d54-32ecee8fa761", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 973.847709] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:39:b9:68', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '52f465cb-7418-4172-bd7d-aec00abeb692', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fcb37279-335e-48f7-8d54-32ecee8fa761', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 973.854967] env[62627]: DEBUG oslo.service.loopingcall [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 973.857124] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 973.857509] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-54602fd6-a5bb-4e2c-a722-bc71da7e6b7b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.879265] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 973.879265] env[62627]: value = "task-2194663" [ 973.879265] env[62627]: _type = "Task" [ 973.879265] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.889164] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194663, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.928307] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11af2034-dffa-445f-98dc-333d2f624af4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.936681] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38429d2c-ff99-4e54-be08-925ac3903746 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.968988] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2e524aa-de47-4cc3-9f52-31ed289dede2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.976210] env[62627]: DEBUG oslo_vmware.api [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Task: {'id': task-2194662, 'name': Rename_Task, 'duration_secs': 0.138959} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.978328] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: f6357f01-d7cb-4477-be2d-98fc17d7a940] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 973.978610] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-364baf8d-79e6-43c2-ad1a-f5465cfa2cc6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.981054] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a463003d-e9a1-4fda-bfe9-68e7a6763893 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.998021] env[62627]: DEBUG nova.compute.provider_tree [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 973.999971] env[62627]: DEBUG oslo_vmware.api [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Waiting for the task: (returnval){ [ 973.999971] env[62627]: value = "task-2194664" [ 973.999971] env[62627]: _type = "Task" [ 973.999971] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.008376] env[62627]: DEBUG oslo_vmware.api [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Task: {'id': task-2194664, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.015021] env[62627]: DEBUG nova.compute.manager [req-c79d6d65-5e9b-4ac7-969d-26cad3298995 req-05b17ec6-2800-4020-a4a2-299a72a7ad70 service nova] [instance: e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] Received event network-changed-fcb37279-335e-48f7-8d54-32ecee8fa761 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 974.015021] env[62627]: DEBUG nova.compute.manager [req-c79d6d65-5e9b-4ac7-969d-26cad3298995 req-05b17ec6-2800-4020-a4a2-299a72a7ad70 service nova] [instance: e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] Refreshing instance network info cache due to event network-changed-fcb37279-335e-48f7-8d54-32ecee8fa761. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 974.015285] env[62627]: DEBUG oslo_concurrency.lockutils [req-c79d6d65-5e9b-4ac7-969d-26cad3298995 req-05b17ec6-2800-4020-a4a2-299a72a7ad70 service nova] Acquiring lock "refresh_cache-e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 974.015468] env[62627]: DEBUG oslo_concurrency.lockutils [req-c79d6d65-5e9b-4ac7-969d-26cad3298995 req-05b17ec6-2800-4020-a4a2-299a72a7ad70 service nova] Acquired lock "refresh_cache-e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 974.015647] env[62627]: DEBUG nova.network.neutron [req-c79d6d65-5e9b-4ac7-969d-26cad3298995 req-05b17ec6-2800-4020-a4a2-299a72a7ad70 service nova] [instance: e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] Refreshing network info cache for port fcb37279-335e-48f7-8d54-32ecee8fa761 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 974.389311] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194663, 'name': CreateVM_Task, 'duration_secs': 0.351635} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.389517] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 974.390203] env[62627]: DEBUG oslo_concurrency.lockutils [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 974.390365] env[62627]: DEBUG oslo_concurrency.lockutils [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 974.390693] env[62627]: DEBUG oslo_concurrency.lockutils [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 974.390955] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e5ae9f0e-01f7-42ad-a4fd-0be1709f91a9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.396144] env[62627]: DEBUG oslo_vmware.api [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the task: (returnval){ [ 974.396144] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d2191d-59e2-2ad3-60c5-148a5ae58aa4" [ 974.396144] env[62627]: _type = "Task" [ 974.396144] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.404323] env[62627]: DEBUG oslo_vmware.api [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d2191d-59e2-2ad3-60c5-148a5ae58aa4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.490340] env[62627]: DEBUG oslo_concurrency.lockutils [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquiring lock "07b08283-cbe5-4c36-9127-c423b259643d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 974.490340] env[62627]: DEBUG oslo_concurrency.lockutils [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "07b08283-cbe5-4c36-9127-c423b259643d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 974.501298] env[62627]: DEBUG nova.scheduler.client.report [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 974.514687] env[62627]: DEBUG oslo_vmware.api [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Task: {'id': task-2194664, 'name': PowerOnVM_Task, 'duration_secs': 0.451387} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.515539] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: f6357f01-d7cb-4477-be2d-98fc17d7a940] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 974.515705] env[62627]: INFO nova.compute.manager [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: f6357f01-d7cb-4477-be2d-98fc17d7a940] Took 7.32 seconds to spawn the instance on the hypervisor. 
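Annotation: instance f6357f01 is now fully spawned, and the preceding records trace the sequence the driver walked through: copy the cached image VMDK into the instance directory (CopyVirtualDisk_Task), extend the root disk to the flavor size (ExtendVirtualDisk_Task), reconfigure the VM to attach the disk (ReconfigVM_Task), rename the VM (Rename_Task), and power it on (PowerOnVM_Task). The outline below is only a hedged reconstruction of that ordering as observed in this log, not Nova's vmops code; every function here is a placeholder that just prints the step it stands for.

def copy_cached_image(datastore: str, image_id: str, instance_uuid: str) -> str:
    # In the real driver this issues CopyVirtualDisk_Task and waits on it.
    src = f"[{datastore}] devstack-image-cache_base/{image_id}/{image_id}.vmdk"
    dst = f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"
    print(f"CopyVirtualDisk_Task: {src} -> {dst}")
    return dst

def extend_root_disk(disk_path: str, size_kb: int) -> None:
    print(f"ExtendVirtualDisk_Task: {disk_path} to {size_kb} KB")

def attach_disk(vm_name: str, disk_path: str) -> None:
    print(f"ReconfigVM_Task: attach {disk_path} to {vm_name}")

def rename_vm(vm_name: str, new_name: str) -> None:
    print(f"Rename_Task: {vm_name} -> {new_name}")

def power_on(vm_name: str) -> None:
    print(f"PowerOnVM_Task: {vm_name}")

def spawn(datastore: str, image_id: str, instance_uuid: str, root_gb: int) -> None:
    """Run the spawn steps in the order they appear in the log above."""
    disk = copy_cached_image(datastore, image_id, instance_uuid)
    # root_gb=1 gives 1048576 KB, matching "Extending root virtual disk to 1048576".
    extend_root_disk(disk, root_gb * 1024 * 1024)
    attach_disk(instance_uuid, disk)
    rename_vm(instance_uuid, instance_uuid)
    power_on(instance_uuid)

if __name__ == "__main__":
    spawn("datastore2",
          "d5497cf1-16e8-478a-a834-e9289b1751b7",
          "f6357f01-d7cb-4477-be2d-98fc17d7a940",
          root_gb=1)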
[ 974.515814] env[62627]: DEBUG nova.compute.manager [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: f6357f01-d7cb-4477-be2d-98fc17d7a940] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 974.516611] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7521a121-d0eb-4e36-87b9-6f657a82ddc1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.760117] env[62627]: DEBUG nova.network.neutron [req-c79d6d65-5e9b-4ac7-969d-26cad3298995 req-05b17ec6-2800-4020-a4a2-299a72a7ad70 service nova] [instance: e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] Updated VIF entry in instance network info cache for port fcb37279-335e-48f7-8d54-32ecee8fa761. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 974.760494] env[62627]: DEBUG nova.network.neutron [req-c79d6d65-5e9b-4ac7-969d-26cad3298995 req-05b17ec6-2800-4020-a4a2-299a72a7ad70 service nova] [instance: e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] Updating instance_info_cache with network_info: [{"id": "fcb37279-335e-48f7-8d54-32ecee8fa761", "address": "fa:16:3e:39:b9:68", "network": {"id": "cc0a3a8a-1222-4362-a164-11bcef0a8bc1", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1517256335-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "01b0f3fe63f64e1183acc0fec83bb3f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52f465cb-7418-4172-bd7d-aec00abeb692", "external-id": "nsx-vlan-transportzone-895", "segmentation_id": 895, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfcb37279-33", "ovs_interfaceid": "fcb37279-335e-48f7-8d54-32ecee8fa761", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 974.907719] env[62627]: DEBUG oslo_vmware.api [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d2191d-59e2-2ad3-60c5-148a5ae58aa4, 'name': SearchDatastore_Task, 'duration_secs': 0.013915} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.908065] env[62627]: DEBUG oslo_concurrency.lockutils [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 974.908308] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 974.908538] env[62627]: DEBUG oslo_concurrency.lockutils [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 974.908686] env[62627]: DEBUG oslo_concurrency.lockutils [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 974.908863] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 974.909137] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7004c6cf-4538-40c3-9bb0-7d0cbb2aa2f3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.918229] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 974.918408] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 974.919130] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-706f1a5c-ead4-4073-a85c-416efaadfb5c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.924321] env[62627]: DEBUG oslo_vmware.api [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the task: (returnval){ [ 974.924321] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]522ae07d-c73a-2556-2d48-27bfe7d7fc0a" [ 974.924321] env[62627]: _type = "Task" [ 974.924321] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.933117] env[62627]: DEBUG oslo_vmware.api [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]522ae07d-c73a-2556-2d48-27bfe7d7fc0a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.992795] env[62627]: DEBUG nova.compute.manager [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 975.009808] env[62627]: DEBUG oslo_concurrency.lockutils [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.410s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 975.010317] env[62627]: DEBUG nova.compute.manager [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 0de040e4-8896-4f02-9284-f68100254631] Start building networks asynchronously for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 975.012862] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f87ae548-ed88-4a62-b117-0d50b6e05974 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.028s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 975.013103] env[62627]: DEBUG nova.objects.instance [None req-f87ae548-ed88-4a62-b117-0d50b6e05974 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Lazy-loading 'resources' on Instance uuid 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 975.037672] env[62627]: INFO nova.compute.manager [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: f6357f01-d7cb-4477-be2d-98fc17d7a940] Took 47.11 seconds to build instance. [ 975.263456] env[62627]: DEBUG oslo_concurrency.lockutils [req-c79d6d65-5e9b-4ac7-969d-26cad3298995 req-05b17ec6-2800-4020-a4a2-299a72a7ad70 service nova] Releasing lock "refresh_cache-e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 975.438590] env[62627]: DEBUG oslo_vmware.api [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]522ae07d-c73a-2556-2d48-27bfe7d7fc0a, 'name': SearchDatastore_Task, 'duration_secs': 0.009783} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.439418] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-17e180cb-de21-4ef3-b1c1-980d252ece32 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.444222] env[62627]: DEBUG oslo_vmware.api [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the task: (returnval){ [ 975.444222] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]529d2f2e-3f3d-752e-b522-aeb77ac68594" [ 975.444222] env[62627]: _type = "Task" [ 975.444222] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.453092] env[62627]: DEBUG oslo_vmware.api [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]529d2f2e-3f3d-752e-b522-aeb77ac68594, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.515017] env[62627]: DEBUG oslo_concurrency.lockutils [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 975.518481] env[62627]: DEBUG nova.compute.utils [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 975.520030] env[62627]: DEBUG nova.compute.manager [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 0de040e4-8896-4f02-9284-f68100254631] Allocating IP information in the background. {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 975.520110] env[62627]: DEBUG nova.network.neutron [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 0de040e4-8896-4f02-9284-f68100254631] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 975.539943] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6edb8568-a4fc-4702-a1c2-ebf3359a74b9 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Lock "f6357f01-d7cb-4477-be2d-98fc17d7a940" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 52.504s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 975.587610] env[62627]: DEBUG nova.policy [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '991f6f2c63da4e5cb3aff0dabe1d5968', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2e06a7550ef84edd9861cafa0bfcb857', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 975.933263] env[62627]: DEBUG nova.network.neutron [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 0de040e4-8896-4f02-9284-f68100254631] Successfully created port: b70ec01a-fb73-4c5e-9db9-88792bcf78d7 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 975.955957] env[62627]: DEBUG oslo_vmware.api [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': 
session[520bf719-dc86-5bda-f3a2-48a743b520b7]529d2f2e-3f3d-752e-b522-aeb77ac68594, 'name': SearchDatastore_Task, 'duration_secs': 0.008701} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.956214] env[62627]: DEBUG oslo_concurrency.lockutils [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 975.956476] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea/e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 975.957528] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-85ed819d-860e-4544-8005-fd2db37b8135 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.959870] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9ebc7d8-f1eb-46c8-abaf-2d96dcb571b1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.966643] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba371a46-2a15-45ad-8383-3b89b342ee70 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.972272] env[62627]: DEBUG oslo_vmware.api [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the task: (returnval){ [ 975.972272] env[62627]: value = "task-2194665" [ 975.972272] env[62627]: _type = "Task" [ 975.972272] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.009022] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79e4e024-daef-4bd8-9760-60ecdc01bdb4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.011328] env[62627]: DEBUG oslo_vmware.api [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194665, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.016051] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee011396-6829-4926-8096-8620f93dc331 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.029326] env[62627]: DEBUG nova.compute.manager [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 0de040e4-8896-4f02-9284-f68100254631] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 976.032785] env[62627]: DEBUG nova.compute.provider_tree [None req-f87ae548-ed88-4a62-b117-0d50b6e05974 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 976.482023] env[62627]: DEBUG oslo_vmware.api [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194665, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.456158} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.482291] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea/e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 976.482581] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 976.482752] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2cf9dbb1-3c5d-4304-840a-da5c6b2d50a2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.488811] env[62627]: DEBUG oslo_vmware.api [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the task: (returnval){ [ 976.488811] env[62627]: value = "task-2194666" [ 976.488811] env[62627]: _type = "Task" [ 976.488811] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.495749] env[62627]: DEBUG oslo_vmware.api [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194666, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.541020] env[62627]: DEBUG nova.scheduler.client.report [None req-f87ae548-ed88-4a62-b117-0d50b6e05974 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 976.542394] env[62627]: DEBUG nova.compute.manager [req-d77efb47-65f9-41b3-929b-103588957719 req-19d48111-ee6b-49b4-ae21-c1cfe1133ecc service nova] [instance: f6357f01-d7cb-4477-be2d-98fc17d7a940] Received event network-changed-43ceb60f-795b-49ef-b06d-892b278ac067 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 976.542574] env[62627]: DEBUG nova.compute.manager [req-d77efb47-65f9-41b3-929b-103588957719 req-19d48111-ee6b-49b4-ae21-c1cfe1133ecc service nova] [instance: f6357f01-d7cb-4477-be2d-98fc17d7a940] Refreshing instance network info cache due to event network-changed-43ceb60f-795b-49ef-b06d-892b278ac067. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 976.543839] env[62627]: DEBUG oslo_concurrency.lockutils [req-d77efb47-65f9-41b3-929b-103588957719 req-19d48111-ee6b-49b4-ae21-c1cfe1133ecc service nova] Acquiring lock "refresh_cache-f6357f01-d7cb-4477-be2d-98fc17d7a940" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 976.543839] env[62627]: DEBUG oslo_concurrency.lockutils [req-d77efb47-65f9-41b3-929b-103588957719 req-19d48111-ee6b-49b4-ae21-c1cfe1133ecc service nova] Acquired lock "refresh_cache-f6357f01-d7cb-4477-be2d-98fc17d7a940" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 976.543839] env[62627]: DEBUG nova.network.neutron [req-d77efb47-65f9-41b3-929b-103588957719 req-19d48111-ee6b-49b4-ae21-c1cfe1133ecc service nova] [instance: f6357f01-d7cb-4477-be2d-98fc17d7a940] Refreshing network info cache for port 43ceb60f-795b-49ef-b06d-892b278ac067 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 976.999236] env[62627]: DEBUG oslo_vmware.api [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194666, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068139} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.999527] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 977.000338] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3bae6bb-fb15-400e-8688-46be4d9e2a98 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.023196] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] Reconfiguring VM instance instance-0000003f to attach disk [datastore2] e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea/e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 977.023540] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6a150b7c-c243-44f9-b126-0ee82a41636c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.042731] env[62627]: DEBUG oslo_vmware.api [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the task: (returnval){ [ 977.042731] env[62627]: value = "task-2194667" [ 977.042731] env[62627]: _type = "Task" [ 977.042731] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.047069] env[62627]: DEBUG nova.compute.manager [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 0de040e4-8896-4f02-9284-f68100254631] Start spawning the instance on the hypervisor. 
{{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 977.048930] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f87ae548-ed88-4a62-b117-0d50b6e05974 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.036s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 977.052536] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.047s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 977.054330] env[62627]: INFO nova.compute.claims [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 977.066127] env[62627]: DEBUG oslo_vmware.api [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194667, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.078502] env[62627]: INFO nova.scheduler.client.report [None req-f87ae548-ed88-4a62-b117-0d50b6e05974 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Deleted allocations for instance 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218 [ 977.082572] env[62627]: DEBUG nova.virt.hardware [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 977.082572] env[62627]: DEBUG nova.virt.hardware [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 977.082572] env[62627]: DEBUG nova.virt.hardware [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Image limits 0:0:0 {{(pid=62627) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 977.082932] env[62627]: DEBUG nova.virt.hardware [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 977.085635] env[62627]: DEBUG nova.virt.hardware [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 977.085635] env[62627]: DEBUG nova.virt.hardware [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 977.085635] env[62627]: DEBUG nova.virt.hardware [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 977.085635] env[62627]: DEBUG nova.virt.hardware [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 977.085635] env[62627]: DEBUG nova.virt.hardware [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 977.085635] env[62627]: DEBUG nova.virt.hardware [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 977.085635] env[62627]: DEBUG nova.virt.hardware [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 977.085635] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0438d745-6fab-4f07-9e69-3667781ab2e8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.101515] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc3c898e-94db-489c-84b3-488c973c0960 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.318505] env[62627]: DEBUG 
nova.network.neutron [req-d77efb47-65f9-41b3-929b-103588957719 req-19d48111-ee6b-49b4-ae21-c1cfe1133ecc service nova] [instance: f6357f01-d7cb-4477-be2d-98fc17d7a940] Updated VIF entry in instance network info cache for port 43ceb60f-795b-49ef-b06d-892b278ac067. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 977.318875] env[62627]: DEBUG nova.network.neutron [req-d77efb47-65f9-41b3-929b-103588957719 req-19d48111-ee6b-49b4-ae21-c1cfe1133ecc service nova] [instance: f6357f01-d7cb-4477-be2d-98fc17d7a940] Updating instance_info_cache with network_info: [{"id": "43ceb60f-795b-49ef-b06d-892b278ac067", "address": "fa:16:3e:4a:6c:ac", "network": {"id": "e4275dc3-df4b-49c1-9677-3c0fc9408d2e", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1014582117-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.187", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "711f8ab1a2d5466399f408870e412587", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e83154-c0d2-4d3d-b95e-3cd5ba336257", "external-id": "nsx-vlan-transportzone-771", "segmentation_id": 771, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43ceb60f-79", "ovs_interfaceid": "43ceb60f-795b-49ef-b06d-892b278ac067", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 977.428807] env[62627]: DEBUG nova.compute.manager [req-31feba97-d49c-42bd-a73a-52867f9cd379 req-68f8102d-599f-4478-94cc-f3829a1fe727 service nova] [instance: 0de040e4-8896-4f02-9284-f68100254631] Received event network-vif-plugged-b70ec01a-fb73-4c5e-9db9-88792bcf78d7 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 977.429033] env[62627]: DEBUG oslo_concurrency.lockutils [req-31feba97-d49c-42bd-a73a-52867f9cd379 req-68f8102d-599f-4478-94cc-f3829a1fe727 service nova] Acquiring lock "0de040e4-8896-4f02-9284-f68100254631-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 977.429254] env[62627]: DEBUG oslo_concurrency.lockutils [req-31feba97-d49c-42bd-a73a-52867f9cd379 req-68f8102d-599f-4478-94cc-f3829a1fe727 service nova] Lock "0de040e4-8896-4f02-9284-f68100254631-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 977.429424] env[62627]: DEBUG oslo_concurrency.lockutils [req-31feba97-d49c-42bd-a73a-52867f9cd379 req-68f8102d-599f-4478-94cc-f3829a1fe727 service nova] Lock "0de040e4-8896-4f02-9284-f68100254631-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 977.429591] 
env[62627]: DEBUG nova.compute.manager [req-31feba97-d49c-42bd-a73a-52867f9cd379 req-68f8102d-599f-4478-94cc-f3829a1fe727 service nova] [instance: 0de040e4-8896-4f02-9284-f68100254631] No waiting events found dispatching network-vif-plugged-b70ec01a-fb73-4c5e-9db9-88792bcf78d7 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 977.429754] env[62627]: WARNING nova.compute.manager [req-31feba97-d49c-42bd-a73a-52867f9cd379 req-68f8102d-599f-4478-94cc-f3829a1fe727 service nova] [instance: 0de040e4-8896-4f02-9284-f68100254631] Received unexpected event network-vif-plugged-b70ec01a-fb73-4c5e-9db9-88792bcf78d7 for instance with vm_state building and task_state spawning. [ 977.514541] env[62627]: DEBUG nova.network.neutron [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 0de040e4-8896-4f02-9284-f68100254631] Successfully updated port: b70ec01a-fb73-4c5e-9db9-88792bcf78d7 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 977.553292] env[62627]: DEBUG oslo_vmware.api [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194667, 'name': ReconfigVM_Task, 'duration_secs': 0.271173} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.553700] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] Reconfigured VM instance instance-0000003f to attach disk [datastore2] e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea/e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 977.554375] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a39f5fef-124d-4580-8a59-4927c0e70844 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.562485] env[62627]: DEBUG oslo_vmware.api [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the task: (returnval){ [ 977.562485] env[62627]: value = "task-2194668" [ 977.562485] env[62627]: _type = "Task" [ 977.562485] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.571673] env[62627]: DEBUG oslo_vmware.api [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194668, 'name': Rename_Task} progress is 5%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.600086] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f87ae548-ed88-4a62-b117-0d50b6e05974 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Lock "87bc4af9-7cbc-4797-8aaa-cf5f8ba51218" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.572s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 977.821848] env[62627]: DEBUG oslo_concurrency.lockutils [req-d77efb47-65f9-41b3-929b-103588957719 req-19d48111-ee6b-49b4-ae21-c1cfe1133ecc service nova] Releasing lock "refresh_cache-f6357f01-d7cb-4477-be2d-98fc17d7a940" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 978.017379] env[62627]: DEBUG oslo_concurrency.lockutils [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Acquiring lock "refresh_cache-0de040e4-8896-4f02-9284-f68100254631" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 978.017561] env[62627]: DEBUG oslo_concurrency.lockutils [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Acquired lock "refresh_cache-0de040e4-8896-4f02-9284-f68100254631" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 978.017744] env[62627]: DEBUG nova.network.neutron [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 0de040e4-8896-4f02-9284-f68100254631] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 978.075166] env[62627]: DEBUG oslo_vmware.api [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194668, 'name': Rename_Task, 'duration_secs': 0.138846} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.075489] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 978.075762] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-01b22fbe-aa74-4646-ae7d-347e185c8b3b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.082840] env[62627]: DEBUG oslo_vmware.api [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the task: (returnval){ [ 978.082840] env[62627]: value = "task-2194669" [ 978.082840] env[62627]: _type = "Task" [ 978.082840] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.090222] env[62627]: DEBUG oslo_vmware.api [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194669, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.408533] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6297aa75-b13b-4617-8688-385870c11d3e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.415481] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32dba4c4-07a4-47a3-9f9e-ffa20f3a7e90 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.445736] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1207045e-4e84-435d-8ad2-1bb4074aa2d6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.452883] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9037d394-233e-4c88-8bda-b35404265984 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.465636] env[62627]: DEBUG nova.compute.provider_tree [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 978.549969] env[62627]: DEBUG nova.network.neutron [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 0de040e4-8896-4f02-9284-f68100254631] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 978.594855] env[62627]: DEBUG oslo_vmware.api [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194669, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.613015] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e74315bb-ef07-4a11-b4d2-6fa8e5f27d04 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Acquiring lock "b1ba58c1-340f-486a-8f47-86fbc7b27485" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 978.613335] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e74315bb-ef07-4a11-b4d2-6fa8e5f27d04 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Lock "b1ba58c1-340f-486a-8f47-86fbc7b27485" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 978.613553] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e74315bb-ef07-4a11-b4d2-6fa8e5f27d04 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Acquiring lock "b1ba58c1-340f-486a-8f47-86fbc7b27485-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 978.613739] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e74315bb-ef07-4a11-b4d2-6fa8e5f27d04 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Lock "b1ba58c1-340f-486a-8f47-86fbc7b27485-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 978.613905] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e74315bb-ef07-4a11-b4d2-6fa8e5f27d04 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Lock "b1ba58c1-340f-486a-8f47-86fbc7b27485-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 978.616198] env[62627]: INFO nova.compute.manager [None req-e74315bb-ef07-4a11-b4d2-6fa8e5f27d04 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: b1ba58c1-340f-486a-8f47-86fbc7b27485] Terminating instance [ 978.700350] env[62627]: DEBUG nova.network.neutron [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 0de040e4-8896-4f02-9284-f68100254631] Updating instance_info_cache with network_info: [{"id": "b70ec01a-fb73-4c5e-9db9-88792bcf78d7", "address": "fa:16:3e:f5:58:1e", "network": {"id": "c8f2b47a-fb3f-4fb5-a55a-d527bd4b6f03", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-2107012038-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, 
"tenant_id": "2e06a7550ef84edd9861cafa0bfcb857", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3117b312-701b-4439-b197-96b6c5cdca89", "external-id": "nsx-vlan-transportzone-94", "segmentation_id": 94, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb70ec01a-fb", "ovs_interfaceid": "b70ec01a-fb73-4c5e-9db9-88792bcf78d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 978.968796] env[62627]: DEBUG nova.scheduler.client.report [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 979.093396] env[62627]: DEBUG oslo_vmware.api [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194669, 'name': PowerOnVM_Task, 'duration_secs': 0.562176} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.093656] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 979.093852] env[62627]: INFO nova.compute.manager [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] Took 7.39 seconds to spawn the instance on the hypervisor. [ 979.094039] env[62627]: DEBUG nova.compute.manager [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 979.094776] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f5982ea-de4c-44f3-a134-66f5ba810071 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.122533] env[62627]: DEBUG nova.compute.manager [None req-e74315bb-ef07-4a11-b4d2-6fa8e5f27d04 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: b1ba58c1-340f-486a-8f47-86fbc7b27485] Start destroying the instance on the hypervisor. 
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 979.122727] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e74315bb-ef07-4a11-b4d2-6fa8e5f27d04 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: b1ba58c1-340f-486a-8f47-86fbc7b27485] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 979.123784] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d2c4b45-0994-4113-a431-c075ce6dfe1f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.131198] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-e74315bb-ef07-4a11-b4d2-6fa8e5f27d04 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: b1ba58c1-340f-486a-8f47-86fbc7b27485] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 979.131427] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3a9f7345-041a-468f-8c37-4ac3cec8a166 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.137703] env[62627]: DEBUG oslo_vmware.api [None req-e74315bb-ef07-4a11-b4d2-6fa8e5f27d04 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for the task: (returnval){ [ 979.137703] env[62627]: value = "task-2194670" [ 979.137703] env[62627]: _type = "Task" [ 979.137703] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.146014] env[62627]: DEBUG oslo_vmware.api [None req-e74315bb-ef07-4a11-b4d2-6fa8e5f27d04 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194670, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.203573] env[62627]: DEBUG oslo_concurrency.lockutils [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Releasing lock "refresh_cache-0de040e4-8896-4f02-9284-f68100254631" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 979.203902] env[62627]: DEBUG nova.compute.manager [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 0de040e4-8896-4f02-9284-f68100254631] Instance network_info: |[{"id": "b70ec01a-fb73-4c5e-9db9-88792bcf78d7", "address": "fa:16:3e:f5:58:1e", "network": {"id": "c8f2b47a-fb3f-4fb5-a55a-d527bd4b6f03", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-2107012038-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2e06a7550ef84edd9861cafa0bfcb857", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3117b312-701b-4439-b197-96b6c5cdca89", "external-id": "nsx-vlan-transportzone-94", "segmentation_id": 94, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb70ec01a-fb", "ovs_interfaceid": "b70ec01a-fb73-4c5e-9db9-88792bcf78d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 979.204672] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 0de040e4-8896-4f02-9284-f68100254631] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f5:58:1e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3117b312-701b-4439-b197-96b6c5cdca89', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b70ec01a-fb73-4c5e-9db9-88792bcf78d7', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 979.212080] env[62627]: DEBUG oslo.service.loopingcall [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 979.212297] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0de040e4-8896-4f02-9284-f68100254631] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 979.212515] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c3026862-3ddc-4ad5-bddd-19c1fd8ccec7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.233140] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 979.233140] env[62627]: value = "task-2194671" [ 979.233140] env[62627]: _type = "Task" [ 979.233140] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.241634] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194671, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.452208] env[62627]: DEBUG nova.compute.manager [req-fecc84c0-b67d-48c6-ac7f-af51802f26a0 req-bf377d4a-41aa-4bfd-8562-ada7b3810431 service nova] [instance: 0de040e4-8896-4f02-9284-f68100254631] Received event network-changed-b70ec01a-fb73-4c5e-9db9-88792bcf78d7 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 979.452517] env[62627]: DEBUG nova.compute.manager [req-fecc84c0-b67d-48c6-ac7f-af51802f26a0 req-bf377d4a-41aa-4bfd-8562-ada7b3810431 service nova] [instance: 0de040e4-8896-4f02-9284-f68100254631] Refreshing instance network info cache due to event network-changed-b70ec01a-fb73-4c5e-9db9-88792bcf78d7. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 979.452769] env[62627]: DEBUG oslo_concurrency.lockutils [req-fecc84c0-b67d-48c6-ac7f-af51802f26a0 req-bf377d4a-41aa-4bfd-8562-ada7b3810431 service nova] Acquiring lock "refresh_cache-0de040e4-8896-4f02-9284-f68100254631" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 979.453106] env[62627]: DEBUG oslo_concurrency.lockutils [req-fecc84c0-b67d-48c6-ac7f-af51802f26a0 req-bf377d4a-41aa-4bfd-8562-ada7b3810431 service nova] Acquired lock "refresh_cache-0de040e4-8896-4f02-9284-f68100254631" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 979.453331] env[62627]: DEBUG nova.network.neutron [req-fecc84c0-b67d-48c6-ac7f-af51802f26a0 req-bf377d4a-41aa-4bfd-8562-ada7b3810431 service nova] [instance: 0de040e4-8896-4f02-9284-f68100254631] Refreshing network info cache for port b70ec01a-fb73-4c5e-9db9-88792bcf78d7 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 979.473500] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.421s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 979.474085] env[62627]: DEBUG nova.compute.manager [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Start 
building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 979.476999] env[62627]: DEBUG oslo_concurrency.lockutils [None req-573c58fd-08ae-43be-961d-aafd92d16d8a tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.482s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 979.477260] env[62627]: DEBUG nova.objects.instance [None req-573c58fd-08ae-43be-961d-aafd92d16d8a tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Lazy-loading 'resources' on Instance uuid 4e65e39e-3b0b-4c71-87d7-e1ed663ada4a {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 979.613323] env[62627]: INFO nova.compute.manager [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] Took 48.29 seconds to build instance. [ 979.647598] env[62627]: DEBUG oslo_vmware.api [None req-e74315bb-ef07-4a11-b4d2-6fa8e5f27d04 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194670, 'name': PowerOffVM_Task, 'duration_secs': 0.231313} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.647850] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-e74315bb-ef07-4a11-b4d2-6fa8e5f27d04 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: b1ba58c1-340f-486a-8f47-86fbc7b27485] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 979.648053] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e74315bb-ef07-4a11-b4d2-6fa8e5f27d04 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: b1ba58c1-340f-486a-8f47-86fbc7b27485] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 979.648301] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ec4b4cad-0785-40be-97a3-321b3c2d0914 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.707247] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e74315bb-ef07-4a11-b4d2-6fa8e5f27d04 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: b1ba58c1-340f-486a-8f47-86fbc7b27485] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 979.707465] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e74315bb-ef07-4a11-b4d2-6fa8e5f27d04 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: b1ba58c1-340f-486a-8f47-86fbc7b27485] Deleting contents of the VM from datastore datastore1 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 979.707644] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-e74315bb-ef07-4a11-b4d2-6fa8e5f27d04 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Deleting the datastore file [datastore1] b1ba58c1-340f-486a-8f47-86fbc7b27485 
{{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 979.707898] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d7fb1388-21d0-4441-985b-d2630a0ec306 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.714439] env[62627]: DEBUG oslo_vmware.api [None req-e74315bb-ef07-4a11-b4d2-6fa8e5f27d04 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for the task: (returnval){ [ 979.714439] env[62627]: value = "task-2194673" [ 979.714439] env[62627]: _type = "Task" [ 979.714439] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.722211] env[62627]: DEBUG oslo_vmware.api [None req-e74315bb-ef07-4a11-b4d2-6fa8e5f27d04 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194673, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.741448] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194671, 'name': CreateVM_Task, 'duration_secs': 0.279695} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.741669] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0de040e4-8896-4f02-9284-f68100254631] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 979.742522] env[62627]: DEBUG oslo_concurrency.lockutils [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 979.742522] env[62627]: DEBUG oslo_concurrency.lockutils [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 979.742920] env[62627]: DEBUG oslo_concurrency.lockutils [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 979.743277] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-226e7751-ac8c-45ae-9970-9acb5f125a47 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.748565] env[62627]: DEBUG oslo_vmware.api [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Waiting for the task: (returnval){ [ 979.748565] env[62627]: value = 
"session[520bf719-dc86-5bda-f3a2-48a743b520b7]52f51143-6d63-517e-cbc4-8da22f426a06" [ 979.748565] env[62627]: _type = "Task" [ 979.748565] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.754895] env[62627]: DEBUG oslo_vmware.api [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52f51143-6d63-517e-cbc4-8da22f426a06, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.982651] env[62627]: DEBUG nova.compute.utils [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 979.986295] env[62627]: DEBUG nova.compute.manager [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Allocating IP information in the background. {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 979.986474] env[62627]: DEBUG nova.network.neutron [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 980.050665] env[62627]: DEBUG nova.policy [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e72f59cc4a02460088e86ad29af1e0ff', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '97c11d791dcf4f94a8e8bda7a6f014cd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 980.115484] env[62627]: DEBUG oslo_concurrency.lockutils [None req-07611db3-75d5-4d58-891f-dda53ab7227d tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Lock "e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 55.779s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 980.198222] env[62627]: DEBUG nova.network.neutron [req-fecc84c0-b67d-48c6-ac7f-af51802f26a0 req-bf377d4a-41aa-4bfd-8562-ada7b3810431 service nova] [instance: 0de040e4-8896-4f02-9284-f68100254631] Updated VIF entry in instance network info cache for port b70ec01a-fb73-4c5e-9db9-88792bcf78d7. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 980.198676] env[62627]: DEBUG nova.network.neutron [req-fecc84c0-b67d-48c6-ac7f-af51802f26a0 req-bf377d4a-41aa-4bfd-8562-ada7b3810431 service nova] [instance: 0de040e4-8896-4f02-9284-f68100254631] Updating instance_info_cache with network_info: [{"id": "b70ec01a-fb73-4c5e-9db9-88792bcf78d7", "address": "fa:16:3e:f5:58:1e", "network": {"id": "c8f2b47a-fb3f-4fb5-a55a-d527bd4b6f03", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-2107012038-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2e06a7550ef84edd9861cafa0bfcb857", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3117b312-701b-4439-b197-96b6c5cdca89", "external-id": "nsx-vlan-transportzone-94", "segmentation_id": 94, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb70ec01a-fb", "ovs_interfaceid": "b70ec01a-fb73-4c5e-9db9-88792bcf78d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 980.226743] env[62627]: DEBUG oslo_vmware.api [None req-e74315bb-ef07-4a11-b4d2-6fa8e5f27d04 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194673, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.134771} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.226743] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-e74315bb-ef07-4a11-b4d2-6fa8e5f27d04 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 980.226862] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e74315bb-ef07-4a11-b4d2-6fa8e5f27d04 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: b1ba58c1-340f-486a-8f47-86fbc7b27485] Deleted contents of the VM from datastore datastore1 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 980.226996] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e74315bb-ef07-4a11-b4d2-6fa8e5f27d04 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: b1ba58c1-340f-486a-8f47-86fbc7b27485] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 980.227189] env[62627]: INFO nova.compute.manager [None req-e74315bb-ef07-4a11-b4d2-6fa8e5f27d04 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: b1ba58c1-340f-486a-8f47-86fbc7b27485] Took 1.10 seconds to destroy the instance on the hypervisor. 
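The task lifecycle traced above (Invoking VirtualMachine.PowerOffVM_Task, then polling Task: {'id': task-2194670, ...} until DeleteDatastoreFile_Task also reports completion) follows oslo.vmware's invoke-and-wait pattern. A minimal sketch of that pattern, not code from this run; the session arguments and the vm_ref argument are assumptions:

```python
# Sketch only: the invoke-and-poll pattern behind the PowerOffVM_Task /
# DeleteDatastoreFile_Task entries above, using oslo.vmware's public session API.
from oslo_vmware import api as vmware_api


def power_off_and_wait(session, vm_ref):
    """Start the asynchronous PowerOffVM_Task, then poll it to completion.

    wait_for_task() is what emits the repeated
    "Task: {'id': task-..., 'name': PowerOffVM_Task} progress is N%" entries.
    """
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    return session.wait_for_task(task)


def make_session():
    # Assumed endpoint and credentials; the real values come from nova.conf.
    return vmware_api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)
```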
[ 980.227479] env[62627]: DEBUG oslo.service.loopingcall [None req-e74315bb-ef07-4a11-b4d2-6fa8e5f27d04 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 980.227684] env[62627]: DEBUG nova.compute.manager [-] [instance: b1ba58c1-340f-486a-8f47-86fbc7b27485] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 980.227774] env[62627]: DEBUG nova.network.neutron [-] [instance: b1ba58c1-340f-486a-8f47-86fbc7b27485] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 980.258210] env[62627]: DEBUG oslo_vmware.api [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52f51143-6d63-517e-cbc4-8da22f426a06, 'name': SearchDatastore_Task, 'duration_secs': 0.008109} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.258471] env[62627]: DEBUG oslo_concurrency.lockutils [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 980.258687] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 0de040e4-8896-4f02-9284-f68100254631] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 980.258925] env[62627]: DEBUG oslo_concurrency.lockutils [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 980.259066] env[62627]: DEBUG oslo_concurrency.lockutils [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 980.259249] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 980.259586] env[62627]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.MakeDirectory with opID=oslo.vmware-936ea76f-2a6d-4730-ba51-2b75526ddcee {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.271787] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 980.271969] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 980.272690] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ad26a14-c829-494b-a201-24e5aca8ff3c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.278552] env[62627]: DEBUG oslo_vmware.api [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Waiting for the task: (returnval){ [ 980.278552] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52aa3dff-5d22-d311-bbdf-86650bab7a51" [ 980.278552] env[62627]: _type = "Task" [ 980.278552] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.289699] env[62627]: DEBUG oslo_vmware.api [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52aa3dff-5d22-d311-bbdf-86650bab7a51, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.387782] env[62627]: DEBUG nova.network.neutron [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Successfully created port: 5825a537-f659-4e7e-907e-e22208e06c0c {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 980.405286] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26b89b17-2611-4475-a12e-4b507204d45c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.414520] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22484d30-d3f2-4e01-8651-e5aca7ee60cb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.453324] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4aca1ab-697d-47eb-8660-e12f749bab43 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.461823] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d9f1254-af4f-4702-a093-5560bbbb7380 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.476812] env[62627]: DEBUG nova.compute.provider_tree [None req-573c58fd-08ae-43be-961d-aafd92d16d8a tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 980.489239] env[62627]: DEBUG nova.compute.manager [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 980.701084] env[62627]: DEBUG oslo_concurrency.lockutils [req-fecc84c0-b67d-48c6-ac7f-af51802f26a0 req-bf377d4a-41aa-4bfd-8562-ada7b3810431 service nova] Releasing lock "refresh_cache-0de040e4-8896-4f02-9284-f68100254631" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 980.789194] env[62627]: DEBUG oslo_vmware.api [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52aa3dff-5d22-d311-bbdf-86650bab7a51, 'name': SearchDatastore_Task, 'duration_secs': 0.009517} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.790091] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aeaf255a-3d00-47f7-87c1-f1a6d652d978 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.795433] env[62627]: DEBUG oslo_vmware.api [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Waiting for the task: (returnval){ [ 980.795433] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d99944-5b7e-d2bc-4643-bfb684f50479" [ 980.795433] env[62627]: _type = "Task" [ 980.795433] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.802889] env[62627]: DEBUG oslo_vmware.api [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d99944-5b7e-d2bc-4643-bfb684f50479, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.957174] env[62627]: DEBUG nova.network.neutron [-] [instance: b1ba58c1-340f-486a-8f47-86fbc7b27485] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 980.979766] env[62627]: DEBUG nova.scheduler.client.report [None req-573c58fd-08ae-43be-961d-aafd92d16d8a tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 981.308262] env[62627]: DEBUG oslo_vmware.api [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d99944-5b7e-d2bc-4643-bfb684f50479, 'name': SearchDatastore_Task, 'duration_secs': 0.009244} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.308586] env[62627]: DEBUG oslo_concurrency.lockutils [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 981.308893] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore1] 0de040e4-8896-4f02-9284-f68100254631/0de040e4-8896-4f02-9284-f68100254631.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 981.309212] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-323acbb2-ec22-49d6-811e-dce264413e41 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.317110] env[62627]: DEBUG oslo_vmware.api [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Waiting for the task: (returnval){ [ 981.317110] env[62627]: value = "task-2194674" [ 981.317110] env[62627]: _type = "Task" [ 981.317110] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.328010] env[62627]: DEBUG oslo_vmware.api [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194674, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.462220] env[62627]: INFO nova.compute.manager [-] [instance: b1ba58c1-340f-486a-8f47-86fbc7b27485] Took 1.23 seconds to deallocate network for instance. 
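The "Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return" entry above points at oslo.service's looping-call module, whose retry decorator re-invokes a function on transient failures before the "Took 1.23 seconds to deallocate network" result is reached. A minimal sketch with assumed retry parameters and a stand-in exception, not the deallocate helper itself:

```python
# Sketch only: retrying a flaky callable with oslo.service's RetryDecorator.
import itertools

from oslo_service import loopingcall

_attempts = itertools.count(1)


class TransientError(Exception):
    """Stand-in for a retriable failure (placeholder, not a Nova exception)."""


@loopingcall.RetryDecorator(max_retry_count=5, inc_sleep_time=1,
                            max_sleep_time=2, exceptions=(TransientError,))
def flaky_operation():
    """Fails twice, then succeeds; each failure is retried after a short sleep."""
    if next(_attempts) < 3:
        raise TransientError()
    return 'done'


print(flaky_operation())
```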
[ 981.477220] env[62627]: DEBUG nova.compute.manager [req-70ed6e25-e93d-4b02-9026-3a3bbf3ace04 req-6810b441-bd28-43a2-9e70-7a17bd2b99d8 service nova] [instance: b1ba58c1-340f-486a-8f47-86fbc7b27485] Received event network-vif-deleted-85865adb-92d3-4f6e-a7af-6ddd191d13d3 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 981.484618] env[62627]: DEBUG oslo_concurrency.lockutils [None req-573c58fd-08ae-43be-961d-aafd92d16d8a tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.008s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 981.487167] env[62627]: DEBUG oslo_concurrency.lockutils [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 31.555s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 981.499214] env[62627]: DEBUG nova.compute.manager [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Start spawning the instance on the hypervisor. {{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 981.512991] env[62627]: INFO nova.scheduler.client.report [None req-573c58fd-08ae-43be-961d-aafd92d16d8a tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Deleted allocations for instance 4e65e39e-3b0b-4c71-87d7-e1ed663ada4a [ 981.528253] env[62627]: DEBUG nova.virt.hardware [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 981.528548] env[62627]: DEBUG nova.virt.hardware [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 981.528714] env[62627]: DEBUG nova.virt.hardware [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 
981.528897] env[62627]: DEBUG nova.virt.hardware [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 981.529100] env[62627]: DEBUG nova.virt.hardware [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 981.529263] env[62627]: DEBUG nova.virt.hardware [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 981.529476] env[62627]: DEBUG nova.virt.hardware [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 981.529636] env[62627]: DEBUG nova.virt.hardware [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 981.529802] env[62627]: DEBUG nova.virt.hardware [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 981.530076] env[62627]: DEBUG nova.virt.hardware [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 981.530323] env[62627]: DEBUG nova.virt.hardware [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 981.531250] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-845d7555-1f50-41b2-85e7-37008a58d1f5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.541061] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cc18832-55d7-4eca-9c8a-143e477dd553 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.546450] env[62627]: DEBUG nova.compute.manager [None req-cfcb3892-a9ca-4d2f-9bed-b14571b6a0cd tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] Checking state {{(pid=62627) _get_power_state 
/opt/stack/nova/nova/compute/manager.py:1798}} [ 981.548041] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f7d6d27-b165-45c5-a8e6-864f70f06be8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.829443] env[62627]: DEBUG oslo_vmware.api [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194674, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.932981] env[62627]: DEBUG nova.compute.manager [req-50d18100-3613-4743-aa01-bb289b4f5ca3 req-bf804dd1-62dd-400d-b2f4-b21e9191df44 service nova] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Received event network-vif-plugged-5825a537-f659-4e7e-907e-e22208e06c0c {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 981.933536] env[62627]: DEBUG oslo_concurrency.lockutils [req-50d18100-3613-4743-aa01-bb289b4f5ca3 req-bf804dd1-62dd-400d-b2f4-b21e9191df44 service nova] Acquiring lock "e9d33611-e373-4303-937d-b43d893e51f5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 981.933916] env[62627]: DEBUG oslo_concurrency.lockutils [req-50d18100-3613-4743-aa01-bb289b4f5ca3 req-bf804dd1-62dd-400d-b2f4-b21e9191df44 service nova] Lock "e9d33611-e373-4303-937d-b43d893e51f5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 981.934245] env[62627]: DEBUG oslo_concurrency.lockutils [req-50d18100-3613-4743-aa01-bb289b4f5ca3 req-bf804dd1-62dd-400d-b2f4-b21e9191df44 service nova] Lock "e9d33611-e373-4303-937d-b43d893e51f5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 981.934646] env[62627]: DEBUG nova.compute.manager [req-50d18100-3613-4743-aa01-bb289b4f5ca3 req-bf804dd1-62dd-400d-b2f4-b21e9191df44 service nova] [instance: e9d33611-e373-4303-937d-b43d893e51f5] No waiting events found dispatching network-vif-plugged-5825a537-f659-4e7e-907e-e22208e06c0c {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 981.934766] env[62627]: WARNING nova.compute.manager [req-50d18100-3613-4743-aa01-bb289b4f5ca3 req-bf804dd1-62dd-400d-b2f4-b21e9191df44 service nova] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Received unexpected event network-vif-plugged-5825a537-f659-4e7e-907e-e22208e06c0c for instance with vm_state building and task_state spawning. 
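The "Acquiring lock ... / acquired ... waited 0.000s / released ... held 0.000s" triplets around the e9d33611-e373-4303-937d-b43d893e51f5-events lock above are emitted by oslo.concurrency's locking helpers wrapping short critical sections. A minimal sketch reusing lock names seen in these entries; the bodies are placeholders:

```python
# Sketch only: the lockutils decorator and context manager that produce the
# acquire/release debug entries in this log.
from oslo_concurrency import lockutils


@lockutils.synchronized('e9d33611-e373-4303-937d-b43d893e51f5-events')
def pop_instance_event():
    """Runs with the per-instance event lock held, one caller at a time."""
    return None


# The same helper works as a context manager, e.g. for "compute_resources":
with lockutils.lock('compute_resources'):
    pass  # claim/usage bookkeeping happens under this lock in the real code
```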
[ 981.969345] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e74315bb-ef07-4a11-b4d2-6fa8e5f27d04 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 981.990126] env[62627]: DEBUG nova.objects.instance [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Lazy-loading 'migration_context' on Instance uuid 892fc84b-02a9-4fa0-81b5-80326a060c64 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 982.021051] env[62627]: DEBUG oslo_concurrency.lockutils [None req-573c58fd-08ae-43be-961d-aafd92d16d8a tempest-ServerDiagnosticsTest-533968786 tempest-ServerDiagnosticsTest-533968786-project-member] Lock "4e65e39e-3b0b-4c71-87d7-e1ed663ada4a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.656s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 982.034142] env[62627]: DEBUG nova.network.neutron [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Successfully updated port: 5825a537-f659-4e7e-907e-e22208e06c0c {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 982.069198] env[62627]: INFO nova.compute.manager [None req-cfcb3892-a9ca-4d2f-9bed-b14571b6a0cd tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] instance snapshotting [ 982.072631] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5834535e-3cf1-4644-815c-187ab9d7ebcf {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.092241] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cb9c362-96a6-4432-862b-0cca023420b1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.330204] env[62627]: DEBUG oslo_vmware.api [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194674, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.549521} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.333672] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore1] 0de040e4-8896-4f02-9284-f68100254631/0de040e4-8896-4f02-9284-f68100254631.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 982.333672] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 0de040e4-8896-4f02-9284-f68100254631] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 982.333672] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-02ee56a9-c416-48e2-862c-053ffc897d91 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.339337] env[62627]: DEBUG oslo_vmware.api [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Waiting for the task: (returnval){ [ 982.339337] env[62627]: value = "task-2194675" [ 982.339337] env[62627]: _type = "Task" [ 982.339337] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.349872] env[62627]: DEBUG oslo_vmware.api [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194675, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.537435] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquiring lock "refresh_cache-e9d33611-e373-4303-937d-b43d893e51f5" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 982.537597] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquired lock "refresh_cache-e9d33611-e373-4303-937d-b43d893e51f5" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 982.537758] env[62627]: DEBUG nova.network.neutron [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 982.603398] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-cfcb3892-a9ca-4d2f-9bed-b14571b6a0cd tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] Creating Snapshot of the VM instance {{(pid=62627) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 982.605807] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-00dff738-64f2-41ee-8ce4-18656f022481 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.614158] env[62627]: DEBUG oslo_vmware.api [None req-cfcb3892-a9ca-4d2f-9bed-b14571b6a0cd tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the task: (returnval){ [ 982.614158] env[62627]: value = "task-2194676" [ 982.614158] env[62627]: _type = "Task" [ 982.614158] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.626290] env[62627]: DEBUG oslo_vmware.api [None req-cfcb3892-a9ca-4d2f-9bed-b14571b6a0cd tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194676, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.849318] env[62627]: DEBUG oslo_vmware.api [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194675, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062885} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.850411] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 0de040e4-8896-4f02-9284-f68100254631] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 982.851186] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c69e48d6-cb18-45b0-9b6e-eb17670be988 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.854103] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15380ab7-cd16-4410-bdb4-cd71064f9908 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.869636] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d15edece-afcb-43f5-95b6-1cfbbf8c02f9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.882429] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 0de040e4-8896-4f02-9284-f68100254631] Reconfiguring VM instance instance-00000040 to attach disk [datastore1] 0de040e4-8896-4f02-9284-f68100254631/0de040e4-8896-4f02-9284-f68100254631.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 982.882648] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-11cfa69a-c1b5-4280-9794-9f90a22f1f88 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.932582] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-907d70e4-5217-4234-8c32-140634413c01 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.935918] env[62627]: DEBUG oslo_vmware.api [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Waiting for the task: (returnval){ [ 982.935918] env[62627]: value = "task-2194677" [ 982.935918] env[62627]: _type = "Task" [ 982.935918] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.943558] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ff19f6c-a00b-40b3-acdf-db3a7d8ccb37 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.950891] env[62627]: DEBUG oslo_vmware.api [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194677, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.962121] env[62627]: DEBUG nova.compute.provider_tree [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 983.075168] env[62627]: DEBUG nova.network.neutron [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 983.127604] env[62627]: DEBUG oslo_vmware.api [None req-cfcb3892-a9ca-4d2f-9bed-b14571b6a0cd tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194676, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.235355] env[62627]: DEBUG nova.network.neutron [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Updating instance_info_cache with network_info: [{"id": "5825a537-f659-4e7e-907e-e22208e06c0c", "address": "fa:16:3e:a9:0b:9d", "network": {"id": "da26ed97-a6a7-47f1-b6c0-d1b2f45ad202", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-265963183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97c11d791dcf4f94a8e8bda7a6f014cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6eaa481-1f92-4851-b98e-09ed0daad7cb", "external-id": "nsx-vlan-transportzone-636", "segmentation_id": 636, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5825a537-f6", "ovs_interfaceid": "5825a537-f659-4e7e-907e-e22208e06c0c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 983.446367] env[62627]: DEBUG oslo_vmware.api [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194677, 'name': ReconfigVM_Task, 'duration_secs': 0.310745} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.447027] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 0de040e4-8896-4f02-9284-f68100254631] Reconfigured VM instance instance-00000040 to attach disk [datastore1] 0de040e4-8896-4f02-9284-f68100254631/0de040e4-8896-4f02-9284-f68100254631.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 983.447396] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c9ca80aa-ed30-46b7-a63c-1171845a5d27 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.453907] env[62627]: DEBUG oslo_vmware.api [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Waiting for the task: (returnval){ [ 983.453907] env[62627]: value = "task-2194678" [ 983.453907] env[62627]: _type = "Task" [ 983.453907] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.464332] env[62627]: DEBUG nova.scheduler.client.report [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 983.468151] env[62627]: DEBUG oslo_vmware.api [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194678, 'name': Rename_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.629135] env[62627]: DEBUG oslo_vmware.api [None req-cfcb3892-a9ca-4d2f-9bed-b14571b6a0cd tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194676, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.739206] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Releasing lock "refresh_cache-e9d33611-e373-4303-937d-b43d893e51f5" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 983.739555] env[62627]: DEBUG nova.compute.manager [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Instance network_info: |[{"id": "5825a537-f659-4e7e-907e-e22208e06c0c", "address": "fa:16:3e:a9:0b:9d", "network": {"id": "da26ed97-a6a7-47f1-b6c0-d1b2f45ad202", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-265963183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97c11d791dcf4f94a8e8bda7a6f014cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6eaa481-1f92-4851-b98e-09ed0daad7cb", "external-id": "nsx-vlan-transportzone-636", "segmentation_id": 636, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5825a537-f6", "ovs_interfaceid": "5825a537-f659-4e7e-907e-e22208e06c0c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 983.739987] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a9:0b:9d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c6eaa481-1f92-4851-b98e-09ed0daad7cb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5825a537-f659-4e7e-907e-e22208e06c0c', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 983.747789] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Creating folder: Project (97c11d791dcf4f94a8e8bda7a6f014cd). Parent ref: group-v447541. 
{{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 983.748480] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-418d340b-0a88-4bbd-b7be-1df6925e7528 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.759645] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Created folder: Project (97c11d791dcf4f94a8e8bda7a6f014cd) in parent group-v447541. [ 983.759837] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Creating folder: Instances. Parent ref: group-v447718. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 983.760096] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ec2885e1-cbf4-4956-9e80-9b365b545075 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.768930] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Created folder: Instances in parent group-v447718. [ 983.769175] env[62627]: DEBUG oslo.service.loopingcall [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 983.769368] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 983.769572] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bef5ff8d-a0fe-4645-8609-f80605617973 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.787642] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 983.787642] env[62627]: value = "task-2194681" [ 983.787642] env[62627]: _type = "Task" [ 983.787642] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.795369] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194681, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.961538] env[62627]: DEBUG nova.compute.manager [req-552aed06-49bf-40c8-be74-f63406200374 req-81178268-c33a-43f0-9c14-a0164e0d070a service nova] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Received event network-changed-5825a537-f659-4e7e-907e-e22208e06c0c {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 983.961790] env[62627]: DEBUG nova.compute.manager [req-552aed06-49bf-40c8-be74-f63406200374 req-81178268-c33a-43f0-9c14-a0164e0d070a service nova] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Refreshing instance network info cache due to event network-changed-5825a537-f659-4e7e-907e-e22208e06c0c. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 983.961917] env[62627]: DEBUG oslo_concurrency.lockutils [req-552aed06-49bf-40c8-be74-f63406200374 req-81178268-c33a-43f0-9c14-a0164e0d070a service nova] Acquiring lock "refresh_cache-e9d33611-e373-4303-937d-b43d893e51f5" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 983.962096] env[62627]: DEBUG oslo_concurrency.lockutils [req-552aed06-49bf-40c8-be74-f63406200374 req-81178268-c33a-43f0-9c14-a0164e0d070a service nova] Acquired lock "refresh_cache-e9d33611-e373-4303-937d-b43d893e51f5" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 983.962301] env[62627]: DEBUG nova.network.neutron [req-552aed06-49bf-40c8-be74-f63406200374 req-81178268-c33a-43f0-9c14-a0164e0d070a service nova] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Refreshing network info cache for port 5825a537-f659-4e7e-907e-e22208e06c0c {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 983.973849] env[62627]: DEBUG oslo_vmware.api [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194678, 'name': Rename_Task, 'duration_secs': 0.220189} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.973849] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 0de040e4-8896-4f02-9284-f68100254631] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 983.974360] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b6c7f4ee-f736-471a-aa85-2275c18ec3da {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.981046] env[62627]: DEBUG oslo_vmware.api [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Waiting for the task: (returnval){ [ 983.981046] env[62627]: value = "task-2194682" [ 983.981046] env[62627]: _type = "Task" [ 983.981046] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.991379] env[62627]: DEBUG oslo_vmware.api [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194682, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.129529] env[62627]: DEBUG oslo_vmware.api [None req-cfcb3892-a9ca-4d2f-9bed-b14571b6a0cd tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194676, 'name': CreateSnapshot_Task, 'duration_secs': 1.032206} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.129800] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-cfcb3892-a9ca-4d2f-9bed-b14571b6a0cd tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] Created Snapshot of the VM instance {{(pid=62627) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 984.130554] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2857e2fb-31d9-47c2-8308-5e7bf94e6e97 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.297257] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194681, 'name': CreateVM_Task, 'duration_secs': 0.377405} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.297465] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 984.298164] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 984.298333] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 984.298703] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 984.299111] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cb778dd9-6710-4e13-bac5-26081a2bf486 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.303138] env[62627]: DEBUG oslo_vmware.api [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 984.303138] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5226820f-3a4e-1630-4a90-0875b1c7e2b1" [ 984.303138] env[62627]: _type = "Task" [ 984.303138] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.310859] env[62627]: DEBUG oslo_vmware.api [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5226820f-3a4e-1630-4a90-0875b1c7e2b1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.475144] env[62627]: DEBUG oslo_concurrency.lockutils [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.988s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 984.481325] env[62627]: DEBUG oslo_concurrency.lockutils [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.569s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 984.482888] env[62627]: INFO nova.compute.claims [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 493e7ea0-b160-4cd0-ae0b-bc46e4d89217] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 984.503654] env[62627]: DEBUG oslo_vmware.api [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194682, 'name': PowerOnVM_Task, 'duration_secs': 0.506899} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.504498] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 0de040e4-8896-4f02-9284-f68100254631] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 984.504761] env[62627]: INFO nova.compute.manager [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 0de040e4-8896-4f02-9284-f68100254631] Took 7.46 seconds to spawn the instance on the hypervisor. 
[ 984.504924] env[62627]: DEBUG nova.compute.manager [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 0de040e4-8896-4f02-9284-f68100254631] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 984.505737] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1074da65-3069-401f-b40e-04d76b3c5d99 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.655026] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-cfcb3892-a9ca-4d2f-9bed-b14571b6a0cd tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] Creating linked-clone VM from snapshot {{(pid=62627) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 984.655026] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-1ee5e174-e3ca-4b50-9829-161d9f4726e5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.661689] env[62627]: DEBUG oslo_vmware.api [None req-cfcb3892-a9ca-4d2f-9bed-b14571b6a0cd tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the task: (returnval){ [ 984.661689] env[62627]: value = "task-2194683" [ 984.661689] env[62627]: _type = "Task" [ 984.661689] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.671916] env[62627]: DEBUG oslo_vmware.api [None req-cfcb3892-a9ca-4d2f-9bed-b14571b6a0cd tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194683, 'name': CloneVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.699982] env[62627]: DEBUG nova.network.neutron [req-552aed06-49bf-40c8-be74-f63406200374 req-81178268-c33a-43f0-9c14-a0164e0d070a service nova] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Updated VIF entry in instance network info cache for port 5825a537-f659-4e7e-907e-e22208e06c0c. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 984.700435] env[62627]: DEBUG nova.network.neutron [req-552aed06-49bf-40c8-be74-f63406200374 req-81178268-c33a-43f0-9c14-a0164e0d070a service nova] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Updating instance_info_cache with network_info: [{"id": "5825a537-f659-4e7e-907e-e22208e06c0c", "address": "fa:16:3e:a9:0b:9d", "network": {"id": "da26ed97-a6a7-47f1-b6c0-d1b2f45ad202", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-265963183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97c11d791dcf4f94a8e8bda7a6f014cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6eaa481-1f92-4851-b98e-09ed0daad7cb", "external-id": "nsx-vlan-transportzone-636", "segmentation_id": 636, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5825a537-f6", "ovs_interfaceid": "5825a537-f659-4e7e-907e-e22208e06c0c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 984.814294] env[62627]: DEBUG oslo_vmware.api [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5226820f-3a4e-1630-4a90-0875b1c7e2b1, 'name': SearchDatastore_Task, 'duration_secs': 0.010948} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.814294] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 984.814294] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 984.814515] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 984.814662] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 984.814843] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 984.815191] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-211fbda7-98b5-4702-8358-897d5c445063 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.823045] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 984.823192] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 984.823932] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d73f6130-7c3b-467b-891b-d55d7c9b8094 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.830625] env[62627]: DEBUG oslo_vmware.api [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 984.830625] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52ce379e-7406-e336-80be-eb272e9bd397" [ 984.830625] env[62627]: _type = "Task" [ 984.830625] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.837853] env[62627]: DEBUG oslo_vmware.api [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52ce379e-7406-e336-80be-eb272e9bd397, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.025911] env[62627]: INFO nova.compute.manager [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 0de040e4-8896-4f02-9284-f68100254631] Took 49.25 seconds to build instance. [ 985.173984] env[62627]: DEBUG oslo_vmware.api [None req-cfcb3892-a9ca-4d2f-9bed-b14571b6a0cd tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194683, 'name': CloneVM_Task} progress is 94%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.202769] env[62627]: DEBUG oslo_concurrency.lockutils [req-552aed06-49bf-40c8-be74-f63406200374 req-81178268-c33a-43f0-9c14-a0164e0d070a service nova] Releasing lock "refresh_cache-e9d33611-e373-4303-937d-b43d893e51f5" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 985.342820] env[62627]: DEBUG oslo_vmware.api [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52ce379e-7406-e336-80be-eb272e9bd397, 'name': SearchDatastore_Task, 'duration_secs': 0.013157} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.343677] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-33c5abd0-5c30-4895-8a50-a791b63824fe {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.349082] env[62627]: DEBUG oslo_vmware.api [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 985.349082] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5229b8b9-918b-e6d4-d31f-f8cf8a6f5ac6" [ 985.349082] env[62627]: _type = "Task" [ 985.349082] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.356693] env[62627]: DEBUG oslo_vmware.api [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5229b8b9-918b-e6d4-d31f-f8cf8a6f5ac6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.527942] env[62627]: DEBUG oslo_concurrency.lockutils [None req-96685ff4-4a5a-44f8-8a49-b99ad05d7e0e tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Lock "0de040e4-8896-4f02-9284-f68100254631" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 55.711s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 985.676416] env[62627]: DEBUG oslo_vmware.api [None req-cfcb3892-a9ca-4d2f-9bed-b14571b6a0cd tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194683, 'name': CloneVM_Task} progress is 95%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.850958] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d5c614c-d66e-4aeb-9538-68df7a044a06 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.865028] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-399ad80d-debc-40dd-ae0d-807790afb8df {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.868376] env[62627]: DEBUG oslo_vmware.api [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5229b8b9-918b-e6d4-d31f-f8cf8a6f5ac6, 'name': SearchDatastore_Task, 'duration_secs': 0.009643} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.868376] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 985.868586] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore1] e9d33611-e373-4303-937d-b43d893e51f5/e9d33611-e373-4303-937d-b43d893e51f5.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 985.869196] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-53592aac-e7a7-4708-8407-43d4344b63b8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.899937] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3e25236-90fe-41ab-b243-ed17149fcfd2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.905576] env[62627]: DEBUG oslo_vmware.api [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 985.905576] env[62627]: value = "task-2194684" [ 985.905576] env[62627]: _type = "Task" [ 985.905576] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.911814] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2639e1ff-981d-4739-b0f5-9ca1553d8ced {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.920554] env[62627]: DEBUG oslo_vmware.api [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2194684, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.929358] env[62627]: DEBUG nova.compute.provider_tree [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 986.020690] env[62627]: INFO nova.compute.manager [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Swapping old allocation on dict_keys(['c17e7f32-68d8-4e2f-8979-15d701de02f7']) held by migration f454579e-0768-41bf-8774-f65a3637cfab for instance [ 986.045074] env[62627]: DEBUG nova.scheduler.client.report [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Overwriting current allocation {'allocations': {'c17e7f32-68d8-4e2f-8979-15d701de02f7': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 82}}, 'project_id': 'e4db2144e94840c4a422d2d933b4a65f', 'user_id': '0896d768b334478bb19804bbb156f4bb', 'consumer_generation': 1} on consumer 892fc84b-02a9-4fa0-81b5-80326a060c64 {{(pid=62627) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 986.158040] env[62627]: DEBUG oslo_concurrency.lockutils [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Acquiring lock "refresh_cache-892fc84b-02a9-4fa0-81b5-80326a060c64" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 986.158526] env[62627]: DEBUG oslo_concurrency.lockutils [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Acquired lock "refresh_cache-892fc84b-02a9-4fa0-81b5-80326a060c64" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 986.158954] env[62627]: DEBUG nova.network.neutron [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 986.183236] env[62627]: DEBUG oslo_vmware.api [None req-cfcb3892-a9ca-4d2f-9bed-b14571b6a0cd tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194683, 'name': CloneVM_Task, 'duration_secs': 1.333088} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.183666] env[62627]: INFO nova.virt.vmwareapi.vmops [None req-cfcb3892-a9ca-4d2f-9bed-b14571b6a0cd tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] Created linked-clone VM from snapshot [ 986.184708] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-982ec661-0fef-41d0-b76e-8190eb3d7ab7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.193456] env[62627]: DEBUG nova.virt.vmwareapi.images [None req-cfcb3892-a9ca-4d2f-9bed-b14571b6a0cd tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] Uploading image aeba0208-712f-4f27-adb0-2e5cef3d30a3 {{(pid=62627) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 986.222420] env[62627]: DEBUG oslo_vmware.rw_handles [None req-cfcb3892-a9ca-4d2f-9bed-b14571b6a0cd tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 986.222420] env[62627]: value = "vm-447721" [ 986.222420] env[62627]: _type = "VirtualMachine" [ 986.222420] env[62627]: }. {{(pid=62627) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 986.222694] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-3cd4ff6f-6dbe-4555-aee0-47e85b32fa8a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.231570] env[62627]: DEBUG oslo_vmware.rw_handles [None req-cfcb3892-a9ca-4d2f-9bed-b14571b6a0cd tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Lease: (returnval){ [ 986.231570] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]528e2e30-ef86-25cc-cadf-74b7f38f37c5" [ 986.231570] env[62627]: _type = "HttpNfcLease" [ 986.231570] env[62627]: } obtained for exporting VM: (result){ [ 986.231570] env[62627]: value = "vm-447721" [ 986.231570] env[62627]: _type = "VirtualMachine" [ 986.231570] env[62627]: }. {{(pid=62627) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 986.231882] env[62627]: DEBUG oslo_vmware.api [None req-cfcb3892-a9ca-4d2f-9bed-b14571b6a0cd tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the lease: (returnval){ [ 986.231882] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]528e2e30-ef86-25cc-cadf-74b7f38f37c5" [ 986.231882] env[62627]: _type = "HttpNfcLease" [ 986.231882] env[62627]: } to be ready. {{(pid=62627) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 986.239774] env[62627]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 986.239774] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]528e2e30-ef86-25cc-cadf-74b7f38f37c5" [ 986.239774] env[62627]: _type = "HttpNfcLease" [ 986.239774] env[62627]: } is initializing. 
{{(pid=62627) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 986.419033] env[62627]: DEBUG oslo_vmware.api [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2194684, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.502256} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.419033] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore1] e9d33611-e373-4303-937d-b43d893e51f5/e9d33611-e373-4303-937d-b43d893e51f5.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 986.419033] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 986.419033] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-75bc1819-ebea-4ae3-a95a-bd948cb0f099 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.425083] env[62627]: DEBUG oslo_vmware.api [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 986.425083] env[62627]: value = "task-2194686" [ 986.425083] env[62627]: _type = "Task" [ 986.425083] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.433305] env[62627]: DEBUG nova.scheduler.client.report [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 986.437116] env[62627]: DEBUG oslo_vmware.api [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2194686, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.702888] env[62627]: DEBUG nova.compute.manager [None req-befb409a-9e14-4215-9535-9e5bbe47f355 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 0de040e4-8896-4f02-9284-f68100254631] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 986.704133] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af9f2bac-f74a-4b29-a4af-6c393744beb0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.743330] env[62627]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 986.743330] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]528e2e30-ef86-25cc-cadf-74b7f38f37c5" [ 986.743330] env[62627]: _type = "HttpNfcLease" [ 986.743330] env[62627]: } is ready. {{(pid=62627) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 986.744330] env[62627]: DEBUG oslo_vmware.rw_handles [None req-cfcb3892-a9ca-4d2f-9bed-b14571b6a0cd tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 986.744330] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]528e2e30-ef86-25cc-cadf-74b7f38f37c5" [ 986.744330] env[62627]: _type = "HttpNfcLease" [ 986.744330] env[62627]: }. {{(pid=62627) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 986.745198] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c348b0c-dab1-4f91-b3c0-ef92e1d1cb98 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.757148] env[62627]: DEBUG oslo_vmware.rw_handles [None req-cfcb3892-a9ca-4d2f-9bed-b14571b6a0cd tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/520851f7-ba25-b330-63d8-446612ce756d/disk-0.vmdk from lease info. {{(pid=62627) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 986.757356] env[62627]: DEBUG oslo_vmware.rw_handles [None req-cfcb3892-a9ca-4d2f-9bed-b14571b6a0cd tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/520851f7-ba25-b330-63d8-446612ce756d/disk-0.vmdk for reading. 
{{(pid=62627) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 986.872727] env[62627]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-a9139a5b-de6c-4419-8da3-ac2e49f333a8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.927895] env[62627]: DEBUG nova.network.neutron [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Updating instance_info_cache with network_info: [{"id": "4a5f7c0b-f029-481c-a96f-0d9789d6bad5", "address": "fa:16:3e:6b:ef:0c", "network": {"id": "bd876f46-2bdf-4196-b718-c36f7e15f150", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.227", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ccb0cafa1a904d578ec4998b386bebf7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae1d6a8-cbba-4bbf-af10-ba5467340475", "external-id": "nsx-vlan-transportzone-271", "segmentation_id": 271, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4a5f7c0b-f0", "ovs_interfaceid": "4a5f7c0b-f029-481c-a96f-0d9789d6bad5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 986.938846] env[62627]: DEBUG oslo_concurrency.lockutils [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.457s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 986.939373] env[62627]: DEBUG nova.compute.manager [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 493e7ea0-b160-4cd0-ae0b-bc46e4d89217] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 986.941794] env[62627]: DEBUG oslo_vmware.api [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2194686, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067124} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.942707] env[62627]: DEBUG oslo_concurrency.lockutils [None req-acda6638-c9cf-460e-9dc1-b5ba7c2eb229 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.338s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 986.942928] env[62627]: DEBUG nova.objects.instance [None req-acda6638-c9cf-460e-9dc1-b5ba7c2eb229 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Lazy-loading 'resources' on Instance uuid 38ab16cc-be4f-47b4-a818-ea864e853872 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 986.943981] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 986.945213] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69eaef2b-85a9-40df-9fb6-95fca4fc24b9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.969534] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Reconfiguring VM instance instance-00000041 to attach disk [datastore1] e9d33611-e373-4303-937d-b43d893e51f5/e9d33611-e373-4303-937d-b43d893e51f5.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 986.970158] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-49c92d7f-18c1-4d66-8fed-6cadc3e16e98 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.994958] env[62627]: DEBUG oslo_vmware.api [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 986.994958] env[62627]: value = "task-2194687" [ 986.994958] env[62627]: _type = "Task" [ 986.994958] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.004057] env[62627]: DEBUG oslo_vmware.api [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2194687, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.220114] env[62627]: INFO nova.compute.manager [None req-befb409a-9e14-4215-9535-9e5bbe47f355 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 0de040e4-8896-4f02-9284-f68100254631] instance snapshotting [ 987.223484] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2519137-6043-484b-a885-20b82f38efa5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.245963] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2464f3d5-7614-4ca9-b929-1849b466ddec {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.433758] env[62627]: DEBUG oslo_concurrency.lockutils [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Releasing lock "refresh_cache-892fc84b-02a9-4fa0-81b5-80326a060c64" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 987.434275] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 987.434544] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d19ebc1a-0ead-45a4-8ce3-1e49565a5b98 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.445754] env[62627]: DEBUG nova.compute.utils [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 987.450235] env[62627]: DEBUG oslo_vmware.api [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Waiting for the task: (returnval){ [ 987.450235] env[62627]: value = "task-2194688" [ 987.450235] env[62627]: _type = "Task" [ 987.450235] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.451106] env[62627]: DEBUG nova.compute.manager [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 493e7ea0-b160-4cd0-ae0b-bc46e4d89217] Allocating IP information in the background. 
{{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 987.451473] env[62627]: DEBUG nova.network.neutron [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 493e7ea0-b160-4cd0-ae0b-bc46e4d89217] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 987.466131] env[62627]: DEBUG oslo_vmware.api [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2194688, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.511519] env[62627]: DEBUG oslo_vmware.api [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2194687, 'name': ReconfigVM_Task, 'duration_secs': 0.387684} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.511830] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Reconfigured VM instance instance-00000041 to attach disk [datastore1] e9d33611-e373-4303-937d-b43d893e51f5/e9d33611-e373-4303-937d-b43d893e51f5.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 987.512508] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-431611d1-412b-4b9b-9d98-b30816616571 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.515871] env[62627]: DEBUG nova.policy [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '719a4b973b0644d49796851afe6055c5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ecf9c58481c14634b2315248118ed1b6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 987.527132] env[62627]: DEBUG oslo_vmware.api [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 987.527132] env[62627]: value = "task-2194689" [ 987.527132] env[62627]: _type = "Task" [ 987.527132] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.538070] env[62627]: DEBUG oslo_vmware.api [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2194689, 'name': Rename_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.758371] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-befb409a-9e14-4215-9535-9e5bbe47f355 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 0de040e4-8896-4f02-9284-f68100254631] Creating Snapshot of the VM instance {{(pid=62627) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 987.758727] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-6d435e2f-4877-4d26-9b4a-0f11a336f79d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.767896] env[62627]: DEBUG oslo_vmware.api [None req-befb409a-9e14-4215-9535-9e5bbe47f355 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Waiting for the task: (returnval){ [ 987.767896] env[62627]: value = "task-2194690" [ 987.767896] env[62627]: _type = "Task" [ 987.767896] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.786914] env[62627]: DEBUG oslo_vmware.api [None req-befb409a-9e14-4215-9535-9e5bbe47f355 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194690, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.862619] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7634f54-b02c-4908-bfc7-97f72bcdf631 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.875162] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c3ae11e-396a-4611-aafe-8e87b8d2d86a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.910970] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-577c1e19-bad9-4a4b-a517-2dfb5963d210 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.921461] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-871bb1a0-8990-42be-813a-5708cd0ae135 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.927077] env[62627]: DEBUG nova.network.neutron [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 493e7ea0-b160-4cd0-ae0b-bc46e4d89217] Successfully created port: 2277dd85-8be7-4a0e-852b-451705959e88 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 987.943775] env[62627]: DEBUG nova.compute.provider_tree [None req-acda6638-c9cf-460e-9dc1-b5ba7c2eb229 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 987.951568] env[62627]: DEBUG nova.compute.manager [None 
req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 493e7ea0-b160-4cd0-ae0b-bc46e4d89217] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 987.968738] env[62627]: DEBUG oslo_vmware.api [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2194688, 'name': PowerOffVM_Task, 'duration_secs': 0.492127} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.968738] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 987.968738] env[62627]: DEBUG nova.virt.hardware [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:32:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='477bc599-8618-4d09-a5b7-e98bb785a942',id=38,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1076459801',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 987.968738] env[62627]: DEBUG nova.virt.hardware [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 987.968738] env[62627]: DEBUG nova.virt.hardware [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 987.968738] env[62627]: DEBUG nova.virt.hardware [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 987.968738] env[62627]: DEBUG nova.virt.hardware [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 987.968738] env[62627]: DEBUG nova.virt.hardware [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:434}} [ 987.969174] env[62627]: DEBUG nova.virt.hardware [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 987.969742] env[62627]: DEBUG nova.virt.hardware [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 987.969742] env[62627]: DEBUG nova.virt.hardware [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 987.970011] env[62627]: DEBUG nova.virt.hardware [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 987.970347] env[62627]: DEBUG nova.virt.hardware [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 987.975916] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-758675f7-2f31-4875-8b29-1bb2873f53a5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.993532] env[62627]: DEBUG oslo_vmware.api [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Waiting for the task: (returnval){ [ 987.993532] env[62627]: value = "task-2194691" [ 987.993532] env[62627]: _type = "Task" [ 987.993532] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.006198] env[62627]: DEBUG oslo_vmware.api [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2194691, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.037358] env[62627]: DEBUG oslo_vmware.api [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2194689, 'name': Rename_Task, 'duration_secs': 0.255879} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.037669] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 988.037939] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f84e137b-b632-4813-b229-bf81c5a545bf {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.047151] env[62627]: DEBUG oslo_vmware.api [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 988.047151] env[62627]: value = "task-2194692" [ 988.047151] env[62627]: _type = "Task" [ 988.047151] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.056205] env[62627]: DEBUG oslo_vmware.api [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2194692, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.278840] env[62627]: DEBUG oslo_vmware.api [None req-befb409a-9e14-4215-9535-9e5bbe47f355 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194690, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.349157] env[62627]: DEBUG nova.network.neutron [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 493e7ea0-b160-4cd0-ae0b-bc46e4d89217] Successfully created port: 513e35e4-7ec3-4b36-82b0-b70d98706831 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 988.448337] env[62627]: DEBUG nova.scheduler.client.report [None req-acda6638-c9cf-460e-9dc1-b5ba7c2eb229 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 988.508424] env[62627]: DEBUG oslo_vmware.api [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2194691, 'name': ReconfigVM_Task, 'duration_secs': 0.463338} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.509467] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb2f237d-3890-426e-94d7-65e56f97eeb5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.537026] env[62627]: DEBUG nova.virt.hardware [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:32:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='477bc599-8618-4d09-a5b7-e98bb785a942',id=38,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1076459801',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 988.537265] env[62627]: DEBUG nova.virt.hardware [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 988.537425] env[62627]: DEBUG nova.virt.hardware [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 988.537646] env[62627]: DEBUG nova.virt.hardware [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 988.537810] env[62627]: DEBUG nova.virt.hardware [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 988.537928] env[62627]: DEBUG nova.virt.hardware [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 988.538146] env[62627]: DEBUG nova.virt.hardware [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 988.538306] env[62627]: DEBUG nova.virt.hardware [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 988.538469] env[62627]: DEBUG nova.virt.hardware [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 988.538628] env[62627]: DEBUG nova.virt.hardware [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 988.538799] env[62627]: DEBUG nova.virt.hardware [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 988.539630] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-066110b1-8518-4c30-86b1-63267293d7e3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.547264] env[62627]: DEBUG oslo_vmware.api [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Waiting for the task: (returnval){ [ 988.547264] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]529afd12-b2ea-3036-c486-da02d6b3014b" [ 988.547264] env[62627]: _type = "Task" [ 988.547264] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.559811] env[62627]: DEBUG oslo_vmware.api [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]529afd12-b2ea-3036-c486-da02d6b3014b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.564009] env[62627]: DEBUG oslo_vmware.api [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2194692, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.780533] env[62627]: DEBUG oslo_vmware.api [None req-befb409a-9e14-4215-9535-9e5bbe47f355 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194690, 'name': CreateSnapshot_Task, 'duration_secs': 0.727437} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.780815] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-befb409a-9e14-4215-9535-9e5bbe47f355 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 0de040e4-8896-4f02-9284-f68100254631] Created Snapshot of the VM instance {{(pid=62627) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 988.781616] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4899874d-65a7-4011-a103-42ddaa550338 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.954454] env[62627]: DEBUG oslo_concurrency.lockutils [None req-acda6638-c9cf-460e-9dc1-b5ba7c2eb229 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.012s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 988.957149] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d8337a53-c167-4f0e-bf22-c90bfb4511f0 tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.665s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 988.957434] env[62627]: DEBUG nova.objects.instance [None req-d8337a53-c167-4f0e-bf22-c90bfb4511f0 tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Lazy-loading 'resources' on Instance uuid c560b907-500b-42fd-851b-61a04472b660 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 988.968978] env[62627]: DEBUG nova.compute.manager [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 493e7ea0-b160-4cd0-ae0b-bc46e4d89217] Start spawning the instance on the hypervisor. 
{{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 988.978055] env[62627]: INFO nova.scheduler.client.report [None req-acda6638-c9cf-460e-9dc1-b5ba7c2eb229 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Deleted allocations for instance 38ab16cc-be4f-47b4-a818-ea864e853872 [ 988.993811] env[62627]: DEBUG nova.virt.hardware [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 988.994080] env[62627]: DEBUG nova.virt.hardware [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 988.994222] env[62627]: DEBUG nova.virt.hardware [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 988.994404] env[62627]: DEBUG nova.virt.hardware [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 988.994550] env[62627]: DEBUG nova.virt.hardware [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 988.994711] env[62627]: DEBUG nova.virt.hardware [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 988.994902] env[62627]: DEBUG nova.virt.hardware [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 988.995097] env[62627]: DEBUG nova.virt.hardware [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 
tempest-ServersTestMultiNic-134641580-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 988.995285] env[62627]: DEBUG nova.virt.hardware [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 988.995451] env[62627]: DEBUG nova.virt.hardware [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 988.995622] env[62627]: DEBUG nova.virt.hardware [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 988.996596] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b26adfd-dc58-496c-a603-5ebbe1cbbffa {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.007981] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e5788c8-1b33-4248-b83a-c13cb870cb04 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.060789] env[62627]: DEBUG oslo_vmware.api [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2194692, 'name': PowerOnVM_Task, 'duration_secs': 0.577967} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.065363] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 989.065558] env[62627]: INFO nova.compute.manager [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Took 7.57 seconds to spawn the instance on the hypervisor. [ 989.065736] env[62627]: DEBUG nova.compute.manager [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 989.066066] env[62627]: DEBUG oslo_vmware.api [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]529afd12-b2ea-3036-c486-da02d6b3014b, 'name': SearchDatastore_Task, 'duration_secs': 0.014651} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.066842] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6228d6d8-2845-4a2b-831f-adb2cc3fd9e2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.075332] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Reconfiguring VM instance instance-0000002d to detach disk 2000 {{(pid=62627) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 989.076018] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-425a237a-ec66-4353-93d1-c1029a0d32f5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.101114] env[62627]: DEBUG oslo_vmware.api [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Waiting for the task: (returnval){ [ 989.101114] env[62627]: value = "task-2194693" [ 989.101114] env[62627]: _type = "Task" [ 989.101114] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.109991] env[62627]: DEBUG oslo_vmware.api [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2194693, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.306174] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-befb409a-9e14-4215-9535-9e5bbe47f355 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 0de040e4-8896-4f02-9284-f68100254631] Creating linked-clone VM from snapshot {{(pid=62627) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 989.306530] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-c0df99bb-bbbf-4147-8c77-d6eed7d16c65 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.316481] env[62627]: DEBUG oslo_vmware.api [None req-befb409a-9e14-4215-9535-9e5bbe47f355 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Waiting for the task: (returnval){ [ 989.316481] env[62627]: value = "task-2194694" [ 989.316481] env[62627]: _type = "Task" [ 989.316481] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.325646] env[62627]: DEBUG oslo_vmware.api [None req-befb409a-9e14-4215-9535-9e5bbe47f355 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194694, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.485899] env[62627]: DEBUG oslo_concurrency.lockutils [None req-acda6638-c9cf-460e-9dc1-b5ba7c2eb229 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Lock "38ab16cc-be4f-47b4-a818-ea864e853872" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.867s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 989.616668] env[62627]: INFO nova.compute.manager [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Took 46.64 seconds to build instance. [ 989.622519] env[62627]: DEBUG oslo_vmware.api [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2194693, 'name': ReconfigVM_Task, 'duration_secs': 0.320467} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.624098] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Reconfigured VM instance instance-0000002d to detach disk 2000 {{(pid=62627) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 989.624448] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73771e52-784b-4ab6-baba-b98537b0d5ad {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.656228] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Reconfiguring VM instance instance-0000002d to attach disk [datastore1] 892fc84b-02a9-4fa0-81b5-80326a060c64/892fc84b-02a9-4fa0-81b5-80326a060c64.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 989.661374] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f536e736-6809-4f5c-9b9a-46ab626a836f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.692114] env[62627]: DEBUG oslo_vmware.api [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Waiting for the task: (returnval){ [ 989.692114] env[62627]: value = "task-2194695" [ 989.692114] env[62627]: _type = "Task" [ 989.692114] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.705571] env[62627]: DEBUG oslo_vmware.api [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2194695, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.830746] env[62627]: DEBUG oslo_vmware.api [None req-befb409a-9e14-4215-9535-9e5bbe47f355 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194694, 'name': CloneVM_Task} progress is 94%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.892758] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edca8bd3-5a93-4758-a228-67428922f5f9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.898731] env[62627]: DEBUG nova.compute.manager [req-91dd0d6e-5245-4019-960b-aba80b5bc6e4 req-b0a26b3a-4d40-4a58-b91a-509e39e603f4 service nova] [instance: 493e7ea0-b160-4cd0-ae0b-bc46e4d89217] Received event network-vif-plugged-2277dd85-8be7-4a0e-852b-451705959e88 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 989.899081] env[62627]: DEBUG oslo_concurrency.lockutils [req-91dd0d6e-5245-4019-960b-aba80b5bc6e4 req-b0a26b3a-4d40-4a58-b91a-509e39e603f4 service nova] Acquiring lock "493e7ea0-b160-4cd0-ae0b-bc46e4d89217-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 989.899357] env[62627]: DEBUG oslo_concurrency.lockutils [req-91dd0d6e-5245-4019-960b-aba80b5bc6e4 req-b0a26b3a-4d40-4a58-b91a-509e39e603f4 service nova] Lock "493e7ea0-b160-4cd0-ae0b-bc46e4d89217-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 989.899645] env[62627]: DEBUG oslo_concurrency.lockutils [req-91dd0d6e-5245-4019-960b-aba80b5bc6e4 req-b0a26b3a-4d40-4a58-b91a-509e39e603f4 service nova] Lock "493e7ea0-b160-4cd0-ae0b-bc46e4d89217-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 989.899902] env[62627]: DEBUG nova.compute.manager [req-91dd0d6e-5245-4019-960b-aba80b5bc6e4 req-b0a26b3a-4d40-4a58-b91a-509e39e603f4 service nova] [instance: 493e7ea0-b160-4cd0-ae0b-bc46e4d89217] No waiting events found dispatching network-vif-plugged-2277dd85-8be7-4a0e-852b-451705959e88 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 989.900146] env[62627]: WARNING nova.compute.manager [req-91dd0d6e-5245-4019-960b-aba80b5bc6e4 req-b0a26b3a-4d40-4a58-b91a-509e39e603f4 service nova] [instance: 493e7ea0-b160-4cd0-ae0b-bc46e4d89217] Received unexpected event network-vif-plugged-2277dd85-8be7-4a0e-852b-451705959e88 for instance with vm_state building and task_state spawning. 
[ 989.907073] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9067e1ad-c38b-4adf-9135-7ecb859cfc13 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.944092] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9231cf3b-4eb4-49ae-8c38-80ecb5e938cb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.952863] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5df981cf-fee5-4ba5-8a6d-5dbbe034973c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.969057] env[62627]: DEBUG nova.compute.provider_tree [None req-d8337a53-c167-4f0e-bf22-c90bfb4511f0 tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 990.048248] env[62627]: DEBUG nova.network.neutron [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 493e7ea0-b160-4cd0-ae0b-bc46e4d89217] Successfully updated port: 2277dd85-8be7-4a0e-852b-451705959e88 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 990.119436] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b16a08f8-4bdd-43bf-a745-4144824f8aa2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lock "e9d33611-e373-4303-937d-b43d893e51f5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 48.158s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 990.186047] env[62627]: DEBUG oslo_concurrency.lockutils [None req-49e07426-1413-412e-b578-daf14e725031 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Acquiring lock "1129349d-65db-4e1d-bb08-cea793adf996" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 990.186047] env[62627]: DEBUG oslo_concurrency.lockutils [None req-49e07426-1413-412e-b578-daf14e725031 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Lock "1129349d-65db-4e1d-bb08-cea793adf996" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 990.186047] env[62627]: DEBUG oslo_concurrency.lockutils [None req-49e07426-1413-412e-b578-daf14e725031 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Acquiring lock "1129349d-65db-4e1d-bb08-cea793adf996-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 990.186047] env[62627]: DEBUG oslo_concurrency.lockutils [None req-49e07426-1413-412e-b578-daf14e725031 tempest-ServerRescueTestJSON-1890721172 
tempest-ServerRescueTestJSON-1890721172-project-member] Lock "1129349d-65db-4e1d-bb08-cea793adf996-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 990.186345] env[62627]: DEBUG oslo_concurrency.lockutils [None req-49e07426-1413-412e-b578-daf14e725031 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Lock "1129349d-65db-4e1d-bb08-cea793adf996-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 990.188481] env[62627]: INFO nova.compute.manager [None req-49e07426-1413-412e-b578-daf14e725031 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] Terminating instance [ 990.206797] env[62627]: DEBUG oslo_vmware.api [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2194695, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.328112] env[62627]: DEBUG oslo_vmware.api [None req-befb409a-9e14-4215-9535-9e5bbe47f355 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194694, 'name': CloneVM_Task} progress is 94%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.473770] env[62627]: DEBUG nova.scheduler.client.report [None req-d8337a53-c167-4f0e-bf22-c90bfb4511f0 tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 990.693535] env[62627]: DEBUG nova.compute.manager [None req-49e07426-1413-412e-b578-daf14e725031 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] Start destroying the instance on the hypervisor. 
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 990.693937] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-49e07426-1413-412e-b578-daf14e725031 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 990.695585] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08d8750e-a705-4848-a606-51b9918aec90 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.714747] env[62627]: DEBUG oslo_vmware.api [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2194695, 'name': ReconfigVM_Task, 'duration_secs': 0.562725} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.718439] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Reconfigured VM instance instance-0000002d to attach disk [datastore1] 892fc84b-02a9-4fa0-81b5-80326a060c64/892fc84b-02a9-4fa0-81b5-80326a060c64.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 990.719074] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-49e07426-1413-412e-b578-daf14e725031 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 990.720300] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22f4e461-7f8c-4149-ada7-f7ee9e3ef069 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.724621] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a148ac04-cb2f-4118-b37d-935f56c9ea2a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.748688] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-493c1fcc-d8c1-4a18-8149-7044159f92bb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.752384] env[62627]: DEBUG oslo_vmware.api [None req-49e07426-1413-412e-b578-daf14e725031 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Waiting for the task: (returnval){ [ 990.752384] env[62627]: value = "task-2194696" [ 990.752384] env[62627]: _type = "Task" [ 990.752384] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.775942] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abb82d1a-3ec3-46c7-b6fd-6e99f229ecf0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.782230] env[62627]: DEBUG oslo_vmware.api [None req-49e07426-1413-412e-b578-daf14e725031 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194696, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.800620] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c27f99cb-0ba8-40b9-98aa-e0d3f8ca5ccb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.809022] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 990.809345] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-32868ba7-24e1-4116-801a-411f4c8432d9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.821021] env[62627]: DEBUG oslo_vmware.api [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Waiting for the task: (returnval){ [ 990.821021] env[62627]: value = "task-2194697" [ 990.821021] env[62627]: _type = "Task" [ 990.821021] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.833541] env[62627]: DEBUG oslo_vmware.api [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2194697, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.838108] env[62627]: DEBUG oslo_vmware.api [None req-befb409a-9e14-4215-9535-9e5bbe47f355 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194694, 'name': CloneVM_Task, 'duration_secs': 1.318723} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.838261] env[62627]: INFO nova.virt.vmwareapi.vmops [None req-befb409a-9e14-4215-9535-9e5bbe47f355 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 0de040e4-8896-4f02-9284-f68100254631] Created linked-clone VM from snapshot [ 990.839030] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af2cdc80-9635-42ca-9504-0256d8fb4f38 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.851034] env[62627]: DEBUG nova.virt.vmwareapi.images [None req-befb409a-9e14-4215-9535-9e5bbe47f355 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 0de040e4-8896-4f02-9284-f68100254631] Uploading image 7bab65f6-f8ef-4aeb-a7ec-e77c0f2faa86 {{(pid=62627) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 990.863725] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-befb409a-9e14-4215-9535-9e5bbe47f355 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 0de040e4-8896-4f02-9284-f68100254631] Destroying the VM {{(pid=62627) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 990.864069] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-9349ee11-5683-4e56-8e1f-15d4e0fb97d3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.872883] env[62627]: DEBUG oslo_vmware.api [None req-befb409a-9e14-4215-9535-9e5bbe47f355 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Waiting for the task: (returnval){ [ 990.872883] env[62627]: value = "task-2194698" [ 990.872883] env[62627]: _type = "Task" [ 990.872883] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.882751] env[62627]: DEBUG oslo_vmware.api [None req-befb409a-9e14-4215-9535-9e5bbe47f355 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194698, 'name': Destroy_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.978543] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d8337a53-c167-4f0e-bf22-c90bfb4511f0 tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.021s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 990.981374] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a02867fa-0d21-41cc-97cb-36aa98a7d386 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.613s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 990.981707] env[62627]: DEBUG nova.objects.instance [None req-a02867fa-0d21-41cc-97cb-36aa98a7d386 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Lazy-loading 'resources' on Instance uuid 5d288606-02b6-4d11-b285-7325e48fb5b3 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 991.002957] env[62627]: INFO nova.scheduler.client.report [None req-d8337a53-c167-4f0e-bf22-c90bfb4511f0 tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Deleted allocations for instance c560b907-500b-42fd-851b-61a04472b660 [ 991.144670] env[62627]: DEBUG nova.compute.manager [req-c85b3b09-6e5a-40f9-8007-09dfd5447a74 req-47b1b829-0283-48d3-9748-e3545048af93 service nova] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Received event network-changed-5825a537-f659-4e7e-907e-e22208e06c0c {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 991.144670] env[62627]: DEBUG nova.compute.manager [req-c85b3b09-6e5a-40f9-8007-09dfd5447a74 req-47b1b829-0283-48d3-9748-e3545048af93 service nova] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Refreshing instance network info cache due to event network-changed-5825a537-f659-4e7e-907e-e22208e06c0c. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 991.144969] env[62627]: DEBUG oslo_concurrency.lockutils [req-c85b3b09-6e5a-40f9-8007-09dfd5447a74 req-47b1b829-0283-48d3-9748-e3545048af93 service nova] Acquiring lock "refresh_cache-e9d33611-e373-4303-937d-b43d893e51f5" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 991.145399] env[62627]: DEBUG oslo_concurrency.lockutils [req-c85b3b09-6e5a-40f9-8007-09dfd5447a74 req-47b1b829-0283-48d3-9748-e3545048af93 service nova] Acquired lock "refresh_cache-e9d33611-e373-4303-937d-b43d893e51f5" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 991.145574] env[62627]: DEBUG nova.network.neutron [req-c85b3b09-6e5a-40f9-8007-09dfd5447a74 req-47b1b829-0283-48d3-9748-e3545048af93 service nova] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Refreshing network info cache for port 5825a537-f659-4e7e-907e-e22208e06c0c {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 991.263384] env[62627]: DEBUG oslo_vmware.api [None req-49e07426-1413-412e-b578-daf14e725031 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194696, 'name': PowerOffVM_Task, 'duration_secs': 0.352365} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.263691] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-49e07426-1413-412e-b578-daf14e725031 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 991.263827] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-49e07426-1413-412e-b578-daf14e725031 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 991.264101] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-66085778-ea3a-49e3-baa0-f2c5f5f41cab {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.330153] env[62627]: DEBUG oslo_vmware.api [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2194697, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.335132] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-49e07426-1413-412e-b578-daf14e725031 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 991.335397] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-49e07426-1413-412e-b578-daf14e725031 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 991.335566] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-49e07426-1413-412e-b578-daf14e725031 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Deleting the datastore file [datastore2] 1129349d-65db-4e1d-bb08-cea793adf996 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 991.335845] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-265af4b4-76f5-40af-9be1-a3cfffffcd0a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.344420] env[62627]: DEBUG oslo_vmware.api [None req-49e07426-1413-412e-b578-daf14e725031 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Waiting for the task: (returnval){ [ 991.344420] env[62627]: value = "task-2194700" [ 991.344420] env[62627]: _type = "Task" [ 991.344420] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.353185] env[62627]: DEBUG oslo_vmware.api [None req-49e07426-1413-412e-b578-daf14e725031 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194700, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.382771] env[62627]: DEBUG oslo_vmware.api [None req-befb409a-9e14-4215-9535-9e5bbe47f355 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194698, 'name': Destroy_Task, 'duration_secs': 0.501973} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.383059] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-befb409a-9e14-4215-9535-9e5bbe47f355 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 0de040e4-8896-4f02-9284-f68100254631] Destroyed the VM [ 991.383306] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-befb409a-9e14-4215-9535-9e5bbe47f355 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 0de040e4-8896-4f02-9284-f68100254631] Deleting Snapshot of the VM instance {{(pid=62627) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 991.383565] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-5be54731-067b-4b59-8fa9-f2f49c834dac {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.392117] env[62627]: DEBUG oslo_vmware.api [None req-befb409a-9e14-4215-9535-9e5bbe47f355 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Waiting for the task: (returnval){ [ 991.392117] env[62627]: value = "task-2194701" [ 991.392117] env[62627]: _type = "Task" [ 991.392117] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.403183] env[62627]: DEBUG oslo_vmware.api [None req-befb409a-9e14-4215-9535-9e5bbe47f355 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194701, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.510519] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d8337a53-c167-4f0e-bf22-c90bfb4511f0 tempest-ServerGroupTestJSON-1579268237 tempest-ServerGroupTestJSON-1579268237-project-member] Lock "c560b907-500b-42fd-851b-61a04472b660" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.647s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 991.821555] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbdced6a-e9fe-4a55-8f0d-e7cfe56f44af {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.843892] env[62627]: DEBUG oslo_vmware.api [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2194697, 'name': PowerOnVM_Task, 'duration_secs': 0.646789} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.845117] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93d256aa-eb44-4ea0-89dd-6146241a9bd9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.848266] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 991.863957] env[62627]: DEBUG oslo_vmware.api [None req-49e07426-1413-412e-b578-daf14e725031 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194700, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.896190] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe0f6502-7137-436a-81ab-51532253931d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.914134] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91f85c12-548c-4d8b-ae42-bd3c3952c086 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.916576] env[62627]: DEBUG oslo_vmware.api [None req-befb409a-9e14-4215-9535-9e5bbe47f355 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194701, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.929166] env[62627]: DEBUG nova.compute.provider_tree [None req-a02867fa-0d21-41cc-97cb-36aa98a7d386 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 991.939506] env[62627]: DEBUG nova.compute.manager [req-dd62cce1-9636-4536-8815-aa0ff788c9ff req-fbcc76b5-d0a7-44f3-af9a-f431da3d6686 service nova] [instance: 493e7ea0-b160-4cd0-ae0b-bc46e4d89217] Received event network-changed-2277dd85-8be7-4a0e-852b-451705959e88 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 991.939840] env[62627]: DEBUG nova.compute.manager [req-dd62cce1-9636-4536-8815-aa0ff788c9ff req-fbcc76b5-d0a7-44f3-af9a-f431da3d6686 service nova] [instance: 493e7ea0-b160-4cd0-ae0b-bc46e4d89217] Refreshing instance network info cache due to event network-changed-2277dd85-8be7-4a0e-852b-451705959e88. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 991.939955] env[62627]: DEBUG oslo_concurrency.lockutils [req-dd62cce1-9636-4536-8815-aa0ff788c9ff req-fbcc76b5-d0a7-44f3-af9a-f431da3d6686 service nova] Acquiring lock "refresh_cache-493e7ea0-b160-4cd0-ae0b-bc46e4d89217" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 991.940068] env[62627]: DEBUG oslo_concurrency.lockutils [req-dd62cce1-9636-4536-8815-aa0ff788c9ff req-fbcc76b5-d0a7-44f3-af9a-f431da3d6686 service nova] Acquired lock "refresh_cache-493e7ea0-b160-4cd0-ae0b-bc46e4d89217" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 991.940239] env[62627]: DEBUG nova.network.neutron [req-dd62cce1-9636-4536-8815-aa0ff788c9ff req-fbcc76b5-d0a7-44f3-af9a-f431da3d6686 service nova] [instance: 493e7ea0-b160-4cd0-ae0b-bc46e4d89217] Refreshing network info cache for port 2277dd85-8be7-4a0e-852b-451705959e88 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 992.021478] env[62627]: DEBUG nova.network.neutron [req-c85b3b09-6e5a-40f9-8007-09dfd5447a74 req-47b1b829-0283-48d3-9748-e3545048af93 service nova] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Updated VIF entry in instance network info cache for port 5825a537-f659-4e7e-907e-e22208e06c0c. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 992.022044] env[62627]: DEBUG nova.network.neutron [req-c85b3b09-6e5a-40f9-8007-09dfd5447a74 req-47b1b829-0283-48d3-9748-e3545048af93 service nova] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Updating instance_info_cache with network_info: [{"id": "5825a537-f659-4e7e-907e-e22208e06c0c", "address": "fa:16:3e:a9:0b:9d", "network": {"id": "da26ed97-a6a7-47f1-b6c0-d1b2f45ad202", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-265963183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.167", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97c11d791dcf4f94a8e8bda7a6f014cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6eaa481-1f92-4851-b98e-09ed0daad7cb", "external-id": "nsx-vlan-transportzone-636", "segmentation_id": 636, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5825a537-f6", "ovs_interfaceid": "5825a537-f659-4e7e-907e-e22208e06c0c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 992.205350] env[62627]: DEBUG nova.network.neutron [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 493e7ea0-b160-4cd0-ae0b-bc46e4d89217] Successfully updated port: 513e35e4-7ec3-4b36-82b0-b70d98706831 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 992.370632] env[62627]: DEBUG oslo_vmware.api [None req-49e07426-1413-412e-b578-daf14e725031 
tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194700, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.412536] env[62627]: DEBUG oslo_vmware.api [None req-befb409a-9e14-4215-9535-9e5bbe47f355 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194701, 'name': RemoveSnapshot_Task, 'duration_secs': 0.848098} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.413890] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-befb409a-9e14-4215-9535-9e5bbe47f355 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 0de040e4-8896-4f02-9284-f68100254631] Deleted Snapshot of the VM instance {{(pid=62627) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 992.432715] env[62627]: DEBUG nova.scheduler.client.report [None req-a02867fa-0d21-41cc-97cb-36aa98a7d386 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 992.483982] env[62627]: DEBUG nova.network.neutron [req-dd62cce1-9636-4536-8815-aa0ff788c9ff req-fbcc76b5-d0a7-44f3-af9a-f431da3d6686 service nova] [instance: 493e7ea0-b160-4cd0-ae0b-bc46e4d89217] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 992.525615] env[62627]: DEBUG oslo_concurrency.lockutils [req-c85b3b09-6e5a-40f9-8007-09dfd5447a74 req-47b1b829-0283-48d3-9748-e3545048af93 service nova] Releasing lock "refresh_cache-e9d33611-e373-4303-937d-b43d893e51f5" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 992.577092] env[62627]: DEBUG nova.network.neutron [req-dd62cce1-9636-4536-8815-aa0ff788c9ff req-fbcc76b5-d0a7-44f3-af9a-f431da3d6686 service nova] [instance: 493e7ea0-b160-4cd0-ae0b-bc46e4d89217] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 992.708272] env[62627]: DEBUG oslo_concurrency.lockutils [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Acquiring lock "refresh_cache-493e7ea0-b160-4cd0-ae0b-bc46e4d89217" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 992.871247] env[62627]: DEBUG oslo_vmware.api [None req-49e07426-1413-412e-b578-daf14e725031 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194700, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.872827] env[62627]: INFO nova.compute.manager [None req-647bc27b-be10-4bac-b4e5-590e39ce9c31 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Updating instance to original state: 'active' [ 992.920502] env[62627]: WARNING nova.compute.manager [None req-befb409a-9e14-4215-9535-9e5bbe47f355 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 0de040e4-8896-4f02-9284-f68100254631] Image not found during snapshot: nova.exception.ImageNotFound: Image 7bab65f6-f8ef-4aeb-a7ec-e77c0f2faa86 could not be found. [ 992.950962] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a02867fa-0d21-41cc-97cb-36aa98a7d386 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.968s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 992.956439] env[62627]: DEBUG oslo_concurrency.lockutils [None req-14d7e9fc-3bd6-4888-8aa6-805b80b62089 tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.263s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 992.956439] env[62627]: DEBUG nova.objects.instance [None req-14d7e9fc-3bd6-4888-8aa6-805b80b62089 tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Lazy-loading 'resources' on Instance uuid 8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 993.027904] env[62627]: INFO nova.scheduler.client.report [None req-a02867fa-0d21-41cc-97cb-36aa98a7d386 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Deleted allocations for instance 5d288606-02b6-4d11-b285-7325e48fb5b3 [ 993.079784] env[62627]: DEBUG oslo_concurrency.lockutils [req-dd62cce1-9636-4536-8815-aa0ff788c9ff req-fbcc76b5-d0a7-44f3-af9a-f431da3d6686 service nova] Releasing lock "refresh_cache-493e7ea0-b160-4cd0-ae0b-bc46e4d89217" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 993.080269] env[62627]: DEBUG oslo_concurrency.lockutils [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Acquired lock "refresh_cache-493e7ea0-b160-4cd0-ae0b-bc46e4d89217" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 993.080460] env[62627]: DEBUG nova.network.neutron [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 493e7ea0-b160-4cd0-ae0b-bc46e4d89217] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 993.368099] env[62627]: DEBUG oslo_vmware.api [None req-49e07426-1413-412e-b578-daf14e725031 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Task: {'id': task-2194700, 
'name': DeleteDatastoreFile_Task, 'duration_secs': 1.932388} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.368433] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-49e07426-1413-412e-b578-daf14e725031 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 993.368634] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-49e07426-1413-412e-b578-daf14e725031 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 993.368931] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-49e07426-1413-412e-b578-daf14e725031 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 993.369262] env[62627]: INFO nova.compute.manager [None req-49e07426-1413-412e-b578-daf14e725031 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] Took 2.68 seconds to destroy the instance on the hypervisor. [ 993.370257] env[62627]: DEBUG oslo.service.loopingcall [None req-49e07426-1413-412e-b578-daf14e725031 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 993.370257] env[62627]: DEBUG nova.compute.manager [-] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 993.370257] env[62627]: DEBUG nova.network.neutron [-] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 993.543544] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a02867fa-0d21-41cc-97cb-36aa98a7d386 tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Lock "5d288606-02b6-4d11-b285-7325e48fb5b3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.581s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 993.650901] env[62627]: DEBUG nova.network.neutron [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 493e7ea0-b160-4cd0-ae0b-bc46e4d89217] Instance cache missing network info. 
{{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 993.674438] env[62627]: DEBUG oslo_concurrency.lockutils [None req-798bf817-78f4-4d2b-a86e-1b269286aa0d tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Acquiring lock "0de040e4-8896-4f02-9284-f68100254631" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 993.675619] env[62627]: DEBUG oslo_concurrency.lockutils [None req-798bf817-78f4-4d2b-a86e-1b269286aa0d tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Lock "0de040e4-8896-4f02-9284-f68100254631" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 993.675619] env[62627]: DEBUG oslo_concurrency.lockutils [None req-798bf817-78f4-4d2b-a86e-1b269286aa0d tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Acquiring lock "0de040e4-8896-4f02-9284-f68100254631-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 993.675619] env[62627]: DEBUG oslo_concurrency.lockutils [None req-798bf817-78f4-4d2b-a86e-1b269286aa0d tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Lock "0de040e4-8896-4f02-9284-f68100254631-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 993.675619] env[62627]: DEBUG oslo_concurrency.lockutils [None req-798bf817-78f4-4d2b-a86e-1b269286aa0d tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Lock "0de040e4-8896-4f02-9284-f68100254631-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 993.678146] env[62627]: INFO nova.compute.manager [None req-798bf817-78f4-4d2b-a86e-1b269286aa0d tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 0de040e4-8896-4f02-9284-f68100254631] Terminating instance [ 993.863951] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f730b47d-a0f0-4336-95c8-36c36c66e71c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.879146] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a37406a-a560-44d3-8b1d-d503e8449de6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.930098] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8962013-6e8d-42af-b5ca-1a2e06ab8870 {{(pid=62627) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.940795] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3647beef-417d-470c-a057-67918d296365 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.957999] env[62627]: DEBUG nova.compute.provider_tree [None req-14d7e9fc-3bd6-4888-8aa6-805b80b62089 tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 994.052554] env[62627]: DEBUG nova.compute.manager [req-12035de6-c7c5-49c9-80ee-adcd206f8a6e req-ab7bc58b-4ea2-4dba-913a-267c5799b10d service nova] [instance: 493e7ea0-b160-4cd0-ae0b-bc46e4d89217] Received event network-vif-plugged-513e35e4-7ec3-4b36-82b0-b70d98706831 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 994.052609] env[62627]: DEBUG oslo_concurrency.lockutils [req-12035de6-c7c5-49c9-80ee-adcd206f8a6e req-ab7bc58b-4ea2-4dba-913a-267c5799b10d service nova] Acquiring lock "493e7ea0-b160-4cd0-ae0b-bc46e4d89217-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 994.052852] env[62627]: DEBUG oslo_concurrency.lockutils [req-12035de6-c7c5-49c9-80ee-adcd206f8a6e req-ab7bc58b-4ea2-4dba-913a-267c5799b10d service nova] Lock "493e7ea0-b160-4cd0-ae0b-bc46e4d89217-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 994.052982] env[62627]: DEBUG oslo_concurrency.lockutils [req-12035de6-c7c5-49c9-80ee-adcd206f8a6e req-ab7bc58b-4ea2-4dba-913a-267c5799b10d service nova] Lock "493e7ea0-b160-4cd0-ae0b-bc46e4d89217-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 994.053180] env[62627]: DEBUG nova.compute.manager [req-12035de6-c7c5-49c9-80ee-adcd206f8a6e req-ab7bc58b-4ea2-4dba-913a-267c5799b10d service nova] [instance: 493e7ea0-b160-4cd0-ae0b-bc46e4d89217] No waiting events found dispatching network-vif-plugged-513e35e4-7ec3-4b36-82b0-b70d98706831 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 994.053401] env[62627]: WARNING nova.compute.manager [req-12035de6-c7c5-49c9-80ee-adcd206f8a6e req-ab7bc58b-4ea2-4dba-913a-267c5799b10d service nova] [instance: 493e7ea0-b160-4cd0-ae0b-bc46e4d89217] Received unexpected event network-vif-plugged-513e35e4-7ec3-4b36-82b0-b70d98706831 for instance with vm_state building and task_state spawning. 
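
The task entries above and below follow one recurring pattern: an oslo.vmware SOAP call returns a vSphere task ("Invoking VirtualMachine.PowerOffVM_Task ..."), and the caller then blocks waiting on that task, which produces the repeated "_poll_task ... progress is N%" lines until the task is reported as "completed successfully". The short Python sketch below illustrates that invoke-then-wait pattern using only the public oslo.vmware API; the vCenter host, credentials, retry count, poll interval and the power_off_vm() helper are illustrative placeholders, not values or code taken from this log.

    from oslo_vmware import api

    def power_off_vm(session, vm_ref):
        """Power off a VM and block until the vSphere task finishes."""
        # invoke_api() issues the SOAP request (the "Invoking
        # VirtualMachine.PowerOffVM_Task" lines above) and returns the
        # task managed-object reference.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        # wait_for_task() polls the task state, emitting the
        # "_poll_task ... progress is N%" debug messages, and raises an
        # oslo_vmware exception if the task ends in an error state.
        return session.wait_for_task(task)

    # Placeholder session; constructing it logs in to the vCenter endpoint.
    # A real nova-compute builds the equivalent from its [vmware] config
    # (host_ip, host_username, host_password, api_retry_count,
    # task_poll_interval).
    session = api.VMwareAPISession('vc.example.test', 'admin', 'secret', 10, 0.5)

The same invoke-then-wait sequence drives the DeleteDatastoreFile_Task, RemoveSnapshot_Task, Destroy_Task, CreateVM_Task and UnregisterVM entries elsewhere in this section.
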
[ 994.053586] env[62627]: DEBUG nova.compute.manager [req-12035de6-c7c5-49c9-80ee-adcd206f8a6e req-ab7bc58b-4ea2-4dba-913a-267c5799b10d service nova] [instance: 493e7ea0-b160-4cd0-ae0b-bc46e4d89217] Received event network-changed-513e35e4-7ec3-4b36-82b0-b70d98706831 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 994.053685] env[62627]: DEBUG nova.compute.manager [req-12035de6-c7c5-49c9-80ee-adcd206f8a6e req-ab7bc58b-4ea2-4dba-913a-267c5799b10d service nova] [instance: 493e7ea0-b160-4cd0-ae0b-bc46e4d89217] Refreshing instance network info cache due to event network-changed-513e35e4-7ec3-4b36-82b0-b70d98706831. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 994.053845] env[62627]: DEBUG oslo_concurrency.lockutils [req-12035de6-c7c5-49c9-80ee-adcd206f8a6e req-ab7bc58b-4ea2-4dba-913a-267c5799b10d service nova] Acquiring lock "refresh_cache-493e7ea0-b160-4cd0-ae0b-bc46e4d89217" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 994.077157] env[62627]: DEBUG nova.network.neutron [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 493e7ea0-b160-4cd0-ae0b-bc46e4d89217] Updating instance_info_cache with network_info: [{"id": "2277dd85-8be7-4a0e-852b-451705959e88", "address": "fa:16:3e:f0:f0:c3", "network": {"id": "bdf04bef-510b-44cc-a04f-bba278cd259d", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-643299637", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.254", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecf9c58481c14634b2315248118ed1b6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8614db14-cc04-466b-b309-367ab8296cda", "external-id": "nsx-vlan-transportzone-337", "segmentation_id": 337, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2277dd85-8b", "ovs_interfaceid": "2277dd85-8be7-4a0e-852b-451705959e88", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "513e35e4-7ec3-4b36-82b0-b70d98706831", "address": "fa:16:3e:88:e9:7b", "network": {"id": "b37d9615-846a-4440-8895-dc51781f4d1e", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-886296704", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.83", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "ecf9c58481c14634b2315248118ed1b6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4fc0575-c8e3-4f3c-b2e1-e10ac2d0cc1a", "external-id": "nsx-vlan-transportzone-256", "segmentation_id": 256, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap513e35e4-7e", "ovs_interfaceid": "513e35e4-7ec3-4b36-82b0-b70d98706831", 
"qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 994.135865] env[62627]: DEBUG nova.network.neutron [-] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 994.185769] env[62627]: DEBUG nova.compute.manager [None req-798bf817-78f4-4d2b-a86e-1b269286aa0d tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 0de040e4-8896-4f02-9284-f68100254631] Start destroying the instance on the hypervisor. {{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 994.185985] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-798bf817-78f4-4d2b-a86e-1b269286aa0d tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 0de040e4-8896-4f02-9284-f68100254631] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 994.186980] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32938053-511e-4321-8f63-bf1e81e9232e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.197653] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-798bf817-78f4-4d2b-a86e-1b269286aa0d tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 0de040e4-8896-4f02-9284-f68100254631] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 994.197653] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-71c40670-ea34-45e7-9cc7-bc348777cceb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.204183] env[62627]: DEBUG oslo_vmware.rw_handles [None req-cfcb3892-a9ca-4d2f-9bed-b14571b6a0cd tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/520851f7-ba25-b330-63d8-446612ce756d/disk-0.vmdk. {{(pid=62627) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 994.205060] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46b2ba47-bed7-482c-8e46-522d9847e796 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.209238] env[62627]: DEBUG oslo_vmware.api [None req-798bf817-78f4-4d2b-a86e-1b269286aa0d tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Waiting for the task: (returnval){ [ 994.209238] env[62627]: value = "task-2194703" [ 994.209238] env[62627]: _type = "Task" [ 994.209238] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.215343] env[62627]: DEBUG oslo_vmware.rw_handles [None req-cfcb3892-a9ca-4d2f-9bed-b14571b6a0cd tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/520851f7-ba25-b330-63d8-446612ce756d/disk-0.vmdk is in state: ready. {{(pid=62627) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 994.215665] env[62627]: ERROR oslo_vmware.rw_handles [None req-cfcb3892-a9ca-4d2f-9bed-b14571b6a0cd tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/520851f7-ba25-b330-63d8-446612ce756d/disk-0.vmdk due to incomplete transfer. [ 994.216257] env[62627]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-5c94aaa8-adcb-471e-8bf9-d7ff3145d5fd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.221852] env[62627]: DEBUG oslo_vmware.api [None req-798bf817-78f4-4d2b-a86e-1b269286aa0d tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194703, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.229919] env[62627]: DEBUG oslo_vmware.rw_handles [None req-cfcb3892-a9ca-4d2f-9bed-b14571b6a0cd tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/520851f7-ba25-b330-63d8-446612ce756d/disk-0.vmdk. {{(pid=62627) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 994.230241] env[62627]: DEBUG nova.virt.vmwareapi.images [None req-cfcb3892-a9ca-4d2f-9bed-b14571b6a0cd tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] Uploaded image aeba0208-712f-4f27-adb0-2e5cef3d30a3 to the Glance image server {{(pid=62627) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 994.232839] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfcb3892-a9ca-4d2f-9bed-b14571b6a0cd tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] Destroying the VM {{(pid=62627) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 994.233159] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-eb90e343-fd25-4c79-8aef-018bc771e741 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.242570] env[62627]: DEBUG oslo_vmware.api [None req-cfcb3892-a9ca-4d2f-9bed-b14571b6a0cd tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the task: (returnval){ [ 994.242570] env[62627]: value = "task-2194704" [ 994.242570] env[62627]: _type = "Task" [ 994.242570] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.254597] env[62627]: DEBUG oslo_vmware.api [None req-cfcb3892-a9ca-4d2f-9bed-b14571b6a0cd tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194704, 'name': Destroy_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.391031] env[62627]: DEBUG oslo_concurrency.lockutils [None req-373561da-fdef-442a-9eb3-d20cfdbb24c1 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Acquiring lock "892fc84b-02a9-4fa0-81b5-80326a060c64" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 994.391348] env[62627]: DEBUG oslo_concurrency.lockutils [None req-373561da-fdef-442a-9eb3-d20cfdbb24c1 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Lock "892fc84b-02a9-4fa0-81b5-80326a060c64" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 994.391557] env[62627]: DEBUG oslo_concurrency.lockutils [None req-373561da-fdef-442a-9eb3-d20cfdbb24c1 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Acquiring lock "892fc84b-02a9-4fa0-81b5-80326a060c64-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 994.391731] env[62627]: DEBUG oslo_concurrency.lockutils [None req-373561da-fdef-442a-9eb3-d20cfdbb24c1 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Lock "892fc84b-02a9-4fa0-81b5-80326a060c64-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 994.392035] env[62627]: DEBUG oslo_concurrency.lockutils [None req-373561da-fdef-442a-9eb3-d20cfdbb24c1 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Lock "892fc84b-02a9-4fa0-81b5-80326a060c64-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 994.395276] env[62627]: INFO nova.compute.manager [None req-373561da-fdef-442a-9eb3-d20cfdbb24c1 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Terminating instance [ 994.461772] env[62627]: DEBUG nova.scheduler.client.report [None req-14d7e9fc-3bd6-4888-8aa6-805b80b62089 tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 
'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 994.580495] env[62627]: DEBUG oslo_concurrency.lockutils [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Releasing lock "refresh_cache-493e7ea0-b160-4cd0-ae0b-bc46e4d89217" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 994.580855] env[62627]: DEBUG nova.compute.manager [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 493e7ea0-b160-4cd0-ae0b-bc46e4d89217] Instance network_info: |[{"id": "2277dd85-8be7-4a0e-852b-451705959e88", "address": "fa:16:3e:f0:f0:c3", "network": {"id": "bdf04bef-510b-44cc-a04f-bba278cd259d", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-643299637", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.254", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecf9c58481c14634b2315248118ed1b6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8614db14-cc04-466b-b309-367ab8296cda", "external-id": "nsx-vlan-transportzone-337", "segmentation_id": 337, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2277dd85-8b", "ovs_interfaceid": "2277dd85-8be7-4a0e-852b-451705959e88", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "513e35e4-7ec3-4b36-82b0-b70d98706831", "address": "fa:16:3e:88:e9:7b", "network": {"id": "b37d9615-846a-4440-8895-dc51781f4d1e", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-886296704", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.83", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "ecf9c58481c14634b2315248118ed1b6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4fc0575-c8e3-4f3c-b2e1-e10ac2d0cc1a", "external-id": "nsx-vlan-transportzone-256", "segmentation_id": 256, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap513e35e4-7e", "ovs_interfaceid": "513e35e4-7ec3-4b36-82b0-b70d98706831", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 994.581143] env[62627]: DEBUG oslo_concurrency.lockutils [req-12035de6-c7c5-49c9-80ee-adcd206f8a6e req-ab7bc58b-4ea2-4dba-913a-267c5799b10d service nova] Acquired lock "refresh_cache-493e7ea0-b160-4cd0-ae0b-bc46e4d89217" {{(pid=62627) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 994.581373] env[62627]: DEBUG nova.network.neutron [req-12035de6-c7c5-49c9-80ee-adcd206f8a6e req-ab7bc58b-4ea2-4dba-913a-267c5799b10d service nova] [instance: 493e7ea0-b160-4cd0-ae0b-bc46e4d89217] Refreshing network info cache for port 513e35e4-7ec3-4b36-82b0-b70d98706831 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 994.582733] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 493e7ea0-b160-4cd0-ae0b-bc46e4d89217] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f0:f0:c3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8614db14-cc04-466b-b309-367ab8296cda', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2277dd85-8be7-4a0e-852b-451705959e88', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:88:e9:7b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a4fc0575-c8e3-4f3c-b2e1-e10ac2d0cc1a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '513e35e4-7ec3-4b36-82b0-b70d98706831', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 994.594133] env[62627]: DEBUG oslo.service.loopingcall [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 994.597958] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 493e7ea0-b160-4cd0-ae0b-bc46e4d89217] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 994.598656] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-63704028-abbd-42f5-9fd1-92b9a4698a4d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.627886] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 994.627886] env[62627]: value = "task-2194705" [ 994.627886] env[62627]: _type = "Task" [ 994.627886] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.641871] env[62627]: INFO nova.compute.manager [-] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] Took 1.27 seconds to deallocate network for instance. [ 994.643025] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194705, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.726784] env[62627]: DEBUG oslo_vmware.api [None req-798bf817-78f4-4d2b-a86e-1b269286aa0d tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194703, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.755881] env[62627]: DEBUG oslo_vmware.api [None req-cfcb3892-a9ca-4d2f-9bed-b14571b6a0cd tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194704, 'name': Destroy_Task} progress is 33%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.881088] env[62627]: DEBUG nova.network.neutron [req-12035de6-c7c5-49c9-80ee-adcd206f8a6e req-ab7bc58b-4ea2-4dba-913a-267c5799b10d service nova] [instance: 493e7ea0-b160-4cd0-ae0b-bc46e4d89217] Updated VIF entry in instance network info cache for port 513e35e4-7ec3-4b36-82b0-b70d98706831. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 994.881550] env[62627]: DEBUG nova.network.neutron [req-12035de6-c7c5-49c9-80ee-adcd206f8a6e req-ab7bc58b-4ea2-4dba-913a-267c5799b10d service nova] [instance: 493e7ea0-b160-4cd0-ae0b-bc46e4d89217] Updating instance_info_cache with network_info: [{"id": "2277dd85-8be7-4a0e-852b-451705959e88", "address": "fa:16:3e:f0:f0:c3", "network": {"id": "bdf04bef-510b-44cc-a04f-bba278cd259d", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-643299637", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.254", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecf9c58481c14634b2315248118ed1b6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8614db14-cc04-466b-b309-367ab8296cda", "external-id": "nsx-vlan-transportzone-337", "segmentation_id": 337, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2277dd85-8b", "ovs_interfaceid": "2277dd85-8be7-4a0e-852b-451705959e88", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "513e35e4-7ec3-4b36-82b0-b70d98706831", "address": "fa:16:3e:88:e9:7b", "network": {"id": "b37d9615-846a-4440-8895-dc51781f4d1e", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-886296704", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.83", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "ecf9c58481c14634b2315248118ed1b6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4fc0575-c8e3-4f3c-b2e1-e10ac2d0cc1a", "external-id": "nsx-vlan-transportzone-256", "segmentation_id": 256, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap513e35e4-7e", "ovs_interfaceid": "513e35e4-7ec3-4b36-82b0-b70d98706831", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 994.900091] env[62627]: DEBUG 
nova.compute.manager [None req-373561da-fdef-442a-9eb3-d20cfdbb24c1 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Start destroying the instance on the hypervisor. {{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 994.900849] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-373561da-fdef-442a-9eb3-d20cfdbb24c1 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 994.901505] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13c353b0-f286-4ae9-bb7d-bd2c8079d64f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.914263] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-373561da-fdef-442a-9eb3-d20cfdbb24c1 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 994.914429] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-34b2fa92-f099-46f2-8151-e572010e6f15 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.928541] env[62627]: DEBUG oslo_vmware.api [None req-373561da-fdef-442a-9eb3-d20cfdbb24c1 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Waiting for the task: (returnval){ [ 994.928541] env[62627]: value = "task-2194706" [ 994.928541] env[62627]: _type = "Task" [ 994.928541] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.945178] env[62627]: DEBUG oslo_vmware.api [None req-373561da-fdef-442a-9eb3-d20cfdbb24c1 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2194706, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.969059] env[62627]: DEBUG oslo_concurrency.lockutils [None req-14d7e9fc-3bd6-4888-8aa6-805b80b62089 tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.017s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 994.971900] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9f5bcb25-ae27-4738-a9e0-f350bba5a060 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.662s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 994.972491] env[62627]: DEBUG nova.objects.instance [None req-9f5bcb25-ae27-4738-a9e0-f350bba5a060 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Lazy-loading 'resources' on Instance uuid cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 994.991888] env[62627]: INFO nova.scheduler.client.report [None req-14d7e9fc-3bd6-4888-8aa6-805b80b62089 tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Deleted allocations for instance 8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a [ 995.140926] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194705, 'name': CreateVM_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.151219] env[62627]: DEBUG oslo_concurrency.lockutils [None req-49e07426-1413-412e-b578-daf14e725031 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 995.219679] env[62627]: DEBUG oslo_vmware.api [None req-798bf817-78f4-4d2b-a86e-1b269286aa0d tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194703, 'name': PowerOffVM_Task, 'duration_secs': 0.577382} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.220417] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-798bf817-78f4-4d2b-a86e-1b269286aa0d tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 0de040e4-8896-4f02-9284-f68100254631] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 995.220753] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-798bf817-78f4-4d2b-a86e-1b269286aa0d tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 0de040e4-8896-4f02-9284-f68100254631] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 995.221045] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f29a2bd6-991f-4dce-a113-6b2093fc77a2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.253143] env[62627]: DEBUG oslo_vmware.api [None req-cfcb3892-a9ca-4d2f-9bed-b14571b6a0cd tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194704, 'name': Destroy_Task, 'duration_secs': 0.629597} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.253427] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-cfcb3892-a9ca-4d2f-9bed-b14571b6a0cd tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] Destroyed the VM [ 995.253670] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-cfcb3892-a9ca-4d2f-9bed-b14571b6a0cd tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] Deleting Snapshot of the VM instance {{(pid=62627) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 995.253927] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-6970361d-91df-4a3b-a1e9-f0915a6c73dd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.260371] env[62627]: DEBUG oslo_vmware.api [None req-cfcb3892-a9ca-4d2f-9bed-b14571b6a0cd tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the task: (returnval){ [ 995.260371] env[62627]: value = "task-2194708" [ 995.260371] env[62627]: _type = "Task" [ 995.260371] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.271212] env[62627]: DEBUG oslo_vmware.api [None req-cfcb3892-a9ca-4d2f-9bed-b14571b6a0cd tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194708, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.288562] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-798bf817-78f4-4d2b-a86e-1b269286aa0d tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 0de040e4-8896-4f02-9284-f68100254631] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 995.288764] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-798bf817-78f4-4d2b-a86e-1b269286aa0d tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 0de040e4-8896-4f02-9284-f68100254631] Deleting contents of the VM from datastore datastore1 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 995.288974] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-798bf817-78f4-4d2b-a86e-1b269286aa0d tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Deleting the datastore file [datastore1] 0de040e4-8896-4f02-9284-f68100254631 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 995.289264] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-840fbf39-152e-4381-90dd-1d4a4ea33136 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.297193] env[62627]: DEBUG oslo_vmware.api [None req-798bf817-78f4-4d2b-a86e-1b269286aa0d tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Waiting for the task: (returnval){ [ 995.297193] env[62627]: value = "task-2194709" [ 995.297193] env[62627]: _type = "Task" [ 995.297193] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.305272] env[62627]: DEBUG oslo_vmware.api [None req-798bf817-78f4-4d2b-a86e-1b269286aa0d tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194709, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.385028] env[62627]: DEBUG oslo_concurrency.lockutils [req-12035de6-c7c5-49c9-80ee-adcd206f8a6e req-ab7bc58b-4ea2-4dba-913a-267c5799b10d service nova] Releasing lock "refresh_cache-493e7ea0-b160-4cd0-ae0b-bc46e4d89217" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 995.385028] env[62627]: DEBUG nova.compute.manager [req-12035de6-c7c5-49c9-80ee-adcd206f8a6e req-ab7bc58b-4ea2-4dba-913a-267c5799b10d service nova] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] Received event network-vif-deleted-f91eb28c-7881-4e2c-b51e-7c0f169652d1 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 995.385384] env[62627]: INFO nova.compute.manager [req-12035de6-c7c5-49c9-80ee-adcd206f8a6e req-ab7bc58b-4ea2-4dba-913a-267c5799b10d service nova] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] Neutron deleted interface f91eb28c-7881-4e2c-b51e-7c0f169652d1; detaching it from the instance and deleting it from the info cache [ 995.385794] env[62627]: DEBUG nova.network.neutron [req-12035de6-c7c5-49c9-80ee-adcd206f8a6e req-ab7bc58b-4ea2-4dba-913a-267c5799b10d service nova] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 995.441607] env[62627]: DEBUG oslo_vmware.api [None req-373561da-fdef-442a-9eb3-d20cfdbb24c1 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2194706, 'name': PowerOffVM_Task, 'duration_secs': 0.284643} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.441607] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-373561da-fdef-442a-9eb3-d20cfdbb24c1 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 995.441607] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-373561da-fdef-442a-9eb3-d20cfdbb24c1 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 995.441607] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e29d6a79-550f-4724-ae28-c695c3d7aacb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.507396] env[62627]: DEBUG oslo_concurrency.lockutils [None req-14d7e9fc-3bd6-4888-8aa6-805b80b62089 tempest-ServersAaction247Test-814113647 tempest-ServersAaction247Test-814113647-project-member] Lock "8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.373s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 995.546163] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-373561da-fdef-442a-9eb3-d20cfdbb24c1 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Unregistered the VM 
{{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 995.546645] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-373561da-fdef-442a-9eb3-d20cfdbb24c1 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Deleting contents of the VM from datastore datastore1 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 995.546742] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-373561da-fdef-442a-9eb3-d20cfdbb24c1 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Deleting the datastore file [datastore1] 892fc84b-02a9-4fa0-81b5-80326a060c64 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 995.547155] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ae55c0d4-fcba-4352-8443-d26e41ab1a62 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.557417] env[62627]: DEBUG oslo_vmware.api [None req-373561da-fdef-442a-9eb3-d20cfdbb24c1 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Waiting for the task: (returnval){ [ 995.557417] env[62627]: value = "task-2194711" [ 995.557417] env[62627]: _type = "Task" [ 995.557417] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.573665] env[62627]: DEBUG oslo_vmware.api [None req-373561da-fdef-442a-9eb3-d20cfdbb24c1 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2194711, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.608750] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Acquiring lock "7184a7d2-72c6-48f2-b125-880f1f9858c7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 995.609164] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Lock "7184a7d2-72c6-48f2-b125-880f1f9858c7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 995.641985] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194705, 'name': CreateVM_Task, 'duration_secs': 0.516714} completed successfully. 
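The PowerOffVM_Task, Destroy_Task, DeleteDatastoreFile_Task and CreateVM_Task completions above, and the recurring "Task: {...} progress is N%" records, are all produced by the same polling loop behind oslo.vmware's wait_for_task. Below is a minimal, purely illustrative approximation of that loop; TaskInfo and fetch_task_info are hypothetical stand-ins, not the library's API.

# Illustrative sketch only: approximates the behaviour behind the
# "Task: {...} progress is N%" / "completed successfully" records above.
# TaskInfo and fetch_task_info() are hypothetical stand-ins for the real
# vSphere TaskInfo object that oslo.vmware polls internally.
import time
from dataclasses import dataclass

@dataclass
class TaskInfo:
    state: str          # 'running', 'success' or 'error'
    progress: int       # percent complete
    error: str | None = None

def wait_for_task(fetch_task_info, poll_interval=0.5):
    """Poll a task until it reaches a terminal state, reporting progress."""
    start = time.monotonic()
    while True:
        info = fetch_task_info()
        if info.state == "running":
            print(f"progress is {info.progress}%")
            time.sleep(poll_interval)
            continue
        duration = time.monotonic() - start
        if info.state == "success":
            print(f"completed successfully, duration_secs={duration:.6f}")
            return info
        raise RuntimeError(info.error or "task failed")

# Usage: simulate a task that finishes on the second poll.
_states = iter([TaskInfo("running", 80), TaskInfo("success", 100)])
wait_for_task(lambda: next(_states), poll_interval=0.01)

The real loop lives in oslo_vmware/api.py; the sketch only mirrors its observable behaviour in this log: periodic progress records, then either a completion record carrying duration_secs or a raised error.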
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.644676] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 493e7ea0-b160-4cd0-ae0b-bc46e4d89217] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 995.646384] env[62627]: DEBUG oslo_concurrency.lockutils [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 995.646384] env[62627]: DEBUG oslo_concurrency.lockutils [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 995.646608] env[62627]: DEBUG oslo_concurrency.lockutils [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 995.649035] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-05d58617-cced-41bb-8a72-100df4282ed3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.652705] env[62627]: DEBUG oslo_vmware.api [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Waiting for the task: (returnval){ [ 995.652705] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]524f6c0d-ee1d-9a9c-14fc-0424276863c8" [ 995.652705] env[62627]: _type = "Task" [ 995.652705] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.663693] env[62627]: DEBUG oslo_vmware.api [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]524f6c0d-ee1d-9a9c-14fc-0424276863c8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.778345] env[62627]: DEBUG oslo_vmware.api [None req-cfcb3892-a9ca-4d2f-9bed-b14571b6a0cd tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194708, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.810332] env[62627]: DEBUG oslo_vmware.api [None req-798bf817-78f4-4d2b-a86e-1b269286aa0d tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194709, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.19143} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.810687] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-798bf817-78f4-4d2b-a86e-1b269286aa0d tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 995.810919] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-798bf817-78f4-4d2b-a86e-1b269286aa0d tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 0de040e4-8896-4f02-9284-f68100254631] Deleted contents of the VM from datastore datastore1 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 995.811303] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-798bf817-78f4-4d2b-a86e-1b269286aa0d tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 0de040e4-8896-4f02-9284-f68100254631] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 995.811397] env[62627]: INFO nova.compute.manager [None req-798bf817-78f4-4d2b-a86e-1b269286aa0d tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: 0de040e4-8896-4f02-9284-f68100254631] Took 1.63 seconds to destroy the instance on the hypervisor. [ 995.811678] env[62627]: DEBUG oslo.service.loopingcall [None req-798bf817-78f4-4d2b-a86e-1b269286aa0d tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 995.814585] env[62627]: DEBUG nova.compute.manager [-] [instance: 0de040e4-8896-4f02-9284-f68100254631] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 995.814690] env[62627]: DEBUG nova.network.neutron [-] [instance: 0de040e4-8896-4f02-9284-f68100254631] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 995.853740] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ef34239-de71-4098-b1fc-57ccd0654f99 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.862189] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8148c3f7-69e6-4c0b-b2c0-d07ec3516e23 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.901906] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b0d67f75-2f8b-4679-946e-5d27010e630f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.904494] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec77cc02-f713-446a-b692-1e083faf80b5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.917334] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e03cfb60-beff-45b7-a1e1-e74b065c07d5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.924368] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e70b850-258d-4aa8-86c1-05da9d8be388 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.956238] env[62627]: DEBUG nova.compute.provider_tree [None req-9f5bcb25-ae27-4738-a9e0-f350bba5a060 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 995.970507] env[62627]: DEBUG nova.compute.manager [req-12035de6-c7c5-49c9-80ee-adcd206f8a6e req-ab7bc58b-4ea2-4dba-913a-267c5799b10d service nova] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] Detach interface failed, port_id=f91eb28c-7881-4e2c-b51e-7c0f169652d1, reason: Instance 1129349d-65db-4e1d-bb08-cea793adf996 could not be found. {{(pid=62627) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 996.069900] env[62627]: DEBUG oslo_vmware.api [None req-373561da-fdef-442a-9eb3-d20cfdbb24c1 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2194711, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.256483} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.070182] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-373561da-fdef-442a-9eb3-d20cfdbb24c1 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 996.070369] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-373561da-fdef-442a-9eb3-d20cfdbb24c1 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Deleted contents of the VM from datastore datastore1 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 996.070543] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-373561da-fdef-442a-9eb3-d20cfdbb24c1 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 996.070712] env[62627]: INFO nova.compute.manager [None req-373561da-fdef-442a-9eb3-d20cfdbb24c1 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Took 1.17 seconds to destroy the instance on the hypervisor. [ 996.070944] env[62627]: DEBUG oslo.service.loopingcall [None req-373561da-fdef-442a-9eb3-d20cfdbb24c1 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 996.071202] env[62627]: DEBUG nova.compute.manager [-] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 996.073035] env[62627]: DEBUG nova.network.neutron [-] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 996.111848] env[62627]: DEBUG nova.compute.manager [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 7184a7d2-72c6-48f2-b125-880f1f9858c7] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 996.166657] env[62627]: DEBUG oslo_vmware.api [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]524f6c0d-ee1d-9a9c-14fc-0424276863c8, 'name': SearchDatastore_Task, 'duration_secs': 0.032323} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.169021] env[62627]: DEBUG oslo_concurrency.lockutils [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 996.169021] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 493e7ea0-b160-4cd0-ae0b-bc46e4d89217] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 996.169021] env[62627]: DEBUG oslo_concurrency.lockutils [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 996.169021] env[62627]: DEBUG oslo_concurrency.lockutils [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 996.169021] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 996.169021] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5dcaae43-81ee-4bc7-8370-ead2346eac26 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.184206] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 996.184413] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 996.185193] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-02160449-7384-4816-936b-84581d286c27 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.191745] env[62627]: DEBUG oslo_vmware.api [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Waiting for the task: (returnval){ [ 996.191745] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52adaf8b-0d53-24d9-4716-508769e67a10" [ 996.191745] env[62627]: _type = "Task" [ 996.191745] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.200957] env[62627]: DEBUG oslo_vmware.api [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52adaf8b-0d53-24d9-4716-508769e67a10, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.211301] env[62627]: DEBUG nova.compute.manager [req-7df4ef32-7dbd-49a5-9705-d096ef9255d1 req-054b26b4-0e96-4051-aa47-7c952bc45017 service nova] [instance: 0de040e4-8896-4f02-9284-f68100254631] Received event network-vif-deleted-b70ec01a-fb73-4c5e-9db9-88792bcf78d7 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 996.211473] env[62627]: INFO nova.compute.manager [req-7df4ef32-7dbd-49a5-9705-d096ef9255d1 req-054b26b4-0e96-4051-aa47-7c952bc45017 service nova] [instance: 0de040e4-8896-4f02-9284-f68100254631] Neutron deleted interface b70ec01a-fb73-4c5e-9db9-88792bcf78d7; detaching it from the instance and deleting it from the info cache [ 996.211647] env[62627]: DEBUG nova.network.neutron [req-7df4ef32-7dbd-49a5-9705-d096ef9255d1 req-054b26b4-0e96-4051-aa47-7c952bc45017 service nova] [instance: 0de040e4-8896-4f02-9284-f68100254631] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 996.272776] env[62627]: DEBUG oslo_vmware.api [None req-cfcb3892-a9ca-4d2f-9bed-b14571b6a0cd tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194708, 'name': RemoveSnapshot_Task, 'duration_secs': 0.653874} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.273219] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-cfcb3892-a9ca-4d2f-9bed-b14571b6a0cd tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] Deleted Snapshot of the VM instance {{(pid=62627) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 996.273503] env[62627]: INFO nova.compute.manager [None req-cfcb3892-a9ca-4d2f-9bed-b14571b6a0cd tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] Took 14.20 seconds to snapshot the instance on the hypervisor. 
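The "Acquiring lock" / "Acquired lock ... waited Ns" / "Releasing lock" records around [datastore1] devstack-image-cache_base, and the "compute_resources" lock held by the resource tracker, come from oslo.concurrency's lockutils, which serializes callers on a named lock and reports how long each one waited for it and held it. Below is a stdlib-only approximation of that pattern for illustration; timed_lock is hypothetical and not the lockutils API.

# Illustrative sketch only: a stdlib approximation of the
# 'Lock "..." acquired ... waited Ns' / '"released" ... held Ns' records that
# lockutils emits around the image-cache and compute_resources critical
# sections above. timed_lock() is a hypothetical helper, not the real API.
import threading
import time
from contextlib import contextmanager

_locks: dict[str, threading.Lock] = {}

@contextmanager
def timed_lock(name: str):
    lock = _locks.setdefault(name, threading.Lock())
    wait_start = time.monotonic()
    lock.acquire()
    print(f'Lock "{name}" acquired :: waited {time.monotonic() - wait_start:.3f}s')
    held_start = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        print(f'Lock "{name}" "released" :: held {time.monotonic() - held_start:.3f}s')

# Usage: serialize access to one cached image, as req-401141a2 does for
# [datastore1] devstack-image-cache_base/d5497cf1-... above.
with timed_lock("[datastore1] devstack-image-cache_base"):
    time.sleep(0.05)  # check the cache / copy the VMDK here

Holding the lock across the SearchDatastore_Task cache check is what keeps two concurrent builds from fetching or copying the same cached image at the same time.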
[ 996.431754] env[62627]: DEBUG nova.compute.manager [req-595bca28-cf99-4dcc-9dee-2eac512bd9ac req-2dd6b1d5-ebce-45e0-b3d7-cc0f0db4e227 service nova] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Received event network-vif-deleted-4a5f7c0b-f029-481c-a96f-0d9789d6bad5 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 996.431821] env[62627]: INFO nova.compute.manager [req-595bca28-cf99-4dcc-9dee-2eac512bd9ac req-2dd6b1d5-ebce-45e0-b3d7-cc0f0db4e227 service nova] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Neutron deleted interface 4a5f7c0b-f029-481c-a96f-0d9789d6bad5; detaching it from the instance and deleting it from the info cache [ 996.432031] env[62627]: DEBUG nova.network.neutron [req-595bca28-cf99-4dcc-9dee-2eac512bd9ac req-2dd6b1d5-ebce-45e0-b3d7-cc0f0db4e227 service nova] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 996.458942] env[62627]: DEBUG oslo_concurrency.lockutils [None req-55fd1ae7-7d25-4b8a-be92-dc0f872d015e tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Acquiring lock "a397bce6-30c7-485d-81ab-88a8e021996c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 996.459224] env[62627]: DEBUG oslo_concurrency.lockutils [None req-55fd1ae7-7d25-4b8a-be92-dc0f872d015e tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Lock "a397bce6-30c7-485d-81ab-88a8e021996c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 996.459436] env[62627]: DEBUG oslo_concurrency.lockutils [None req-55fd1ae7-7d25-4b8a-be92-dc0f872d015e tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Acquiring lock "a397bce6-30c7-485d-81ab-88a8e021996c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 996.459624] env[62627]: DEBUG oslo_concurrency.lockutils [None req-55fd1ae7-7d25-4b8a-be92-dc0f872d015e tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Lock "a397bce6-30c7-485d-81ab-88a8e021996c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 996.459792] env[62627]: DEBUG oslo_concurrency.lockutils [None req-55fd1ae7-7d25-4b8a-be92-dc0f872d015e tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Lock "a397bce6-30c7-485d-81ab-88a8e021996c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 996.462098] env[62627]: INFO nova.compute.manager [None req-55fd1ae7-7d25-4b8a-be92-dc0f872d015e tempest-VolumesAdminNegativeTest-1197893955 
tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Terminating instance [ 996.472870] env[62627]: DEBUG nova.scheduler.client.report [None req-9f5bcb25-ae27-4738-a9e0-f350bba5a060 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 996.638920] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 996.659545] env[62627]: DEBUG nova.network.neutron [-] [instance: 0de040e4-8896-4f02-9284-f68100254631] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 996.703822] env[62627]: DEBUG oslo_vmware.api [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52adaf8b-0d53-24d9-4716-508769e67a10, 'name': SearchDatastore_Task, 'duration_secs': 0.032406} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.704531] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-18261001-95ba-4c76-8867-c0867b51bc18 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.712037] env[62627]: DEBUG oslo_vmware.api [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Waiting for the task: (returnval){ [ 996.712037] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]529780d2-2c35-bf9d-4502-c79ee760a28a" [ 996.712037] env[62627]: _type = "Task" [ 996.712037] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.716286] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6e2f2b95-59b9-45af-a92d-d73845f0e9bb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.723425] env[62627]: DEBUG oslo_vmware.api [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]529780d2-2c35-bf9d-4502-c79ee760a28a, 'name': SearchDatastore_Task} progress is 0%. 
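The inventory report above for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 implies the capacity the scheduler can place against: usable capacity per resource class is (total - reserved) * allocation_ratio. Worked out with the exact figures from the log (a short illustrative sketch, not scheduler code):

# Effective capacity implied by the inventory record above:
# capacity = (total - reserved) * allocation_ratio per resource class.
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: {capacity:g}")
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400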
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.727547] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84d3a5da-dccc-485c-a8d8-7fefe7a45a93 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.766565] env[62627]: DEBUG nova.compute.manager [req-7df4ef32-7dbd-49a5-9705-d096ef9255d1 req-054b26b4-0e96-4051-aa47-7c952bc45017 service nova] [instance: 0de040e4-8896-4f02-9284-f68100254631] Detach interface failed, port_id=b70ec01a-fb73-4c5e-9db9-88792bcf78d7, reason: Instance 0de040e4-8896-4f02-9284-f68100254631 could not be found. {{(pid=62627) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 996.916050] env[62627]: DEBUG nova.network.neutron [-] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 996.935798] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5d5c895b-6f5e-4468-96e7-9c47cb407fdc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.946162] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2c0ef9f-27a3-410a-81c9-f6dbc86a9c44 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.966749] env[62627]: DEBUG nova.compute.manager [None req-55fd1ae7-7d25-4b8a-be92-dc0f872d015e tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Start destroying the instance on the hypervisor. {{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 996.966749] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-55fd1ae7-7d25-4b8a-be92-dc0f872d015e tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 996.967918] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-565daab3-d50a-498f-b64d-bd15b3e9fc65 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.984616] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9f5bcb25-ae27-4738-a9e0-f350bba5a060 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.013s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 996.986876] env[62627]: DEBUG nova.compute.manager [req-595bca28-cf99-4dcc-9dee-2eac512bd9ac req-2dd6b1d5-ebce-45e0-b3d7-cc0f0db4e227 service nova] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Detach interface failed, port_id=4a5f7c0b-f029-481c-a96f-0d9789d6bad5, reason: Instance 892fc84b-02a9-4fa0-81b5-80326a060c64 could not be found. 
{{(pid=62627) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 996.987927] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.091s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 996.989382] env[62627]: INFO nova.compute.claims [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] [instance: 9adb2baf-315f-4122-a657-cb99757002ba] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 996.994211] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-55fd1ae7-7d25-4b8a-be92-dc0f872d015e tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 996.994405] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1f01c57e-ba26-4dd9-b5f3-e23e6d63d083 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.002060] env[62627]: DEBUG oslo_vmware.api [None req-55fd1ae7-7d25-4b8a-be92-dc0f872d015e tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Waiting for the task: (returnval){ [ 997.002060] env[62627]: value = "task-2194712" [ 997.002060] env[62627]: _type = "Task" [ 997.002060] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.011805] env[62627]: DEBUG oslo_vmware.api [None req-55fd1ae7-7d25-4b8a-be92-dc0f872d015e tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Task: {'id': task-2194712, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.012452] env[62627]: INFO nova.scheduler.client.report [None req-9f5bcb25-ae27-4738-a9e0-f350bba5a060 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Deleted allocations for instance cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a [ 997.162051] env[62627]: INFO nova.compute.manager [-] [instance: 0de040e4-8896-4f02-9284-f68100254631] Took 1.35 seconds to deallocate network for instance. [ 997.223817] env[62627]: DEBUG oslo_vmware.api [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]529780d2-2c35-bf9d-4502-c79ee760a28a, 'name': SearchDatastore_Task, 'duration_secs': 0.045987} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.224123] env[62627]: DEBUG oslo_concurrency.lockutils [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 997.224384] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore1] 493e7ea0-b160-4cd0-ae0b-bc46e4d89217/493e7ea0-b160-4cd0-ae0b-bc46e4d89217.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 997.224638] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-13646e4d-76d4-4a8a-9acd-dbbce8bcf5eb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.233401] env[62627]: DEBUG oslo_vmware.api [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Waiting for the task: (returnval){ [ 997.233401] env[62627]: value = "task-2194713" [ 997.233401] env[62627]: _type = "Task" [ 997.233401] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.244082] env[62627]: DEBUG oslo_vmware.api [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Task: {'id': task-2194713, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.418502] env[62627]: INFO nova.compute.manager [-] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Took 1.35 seconds to deallocate network for instance. [ 997.517866] env[62627]: DEBUG oslo_vmware.api [None req-55fd1ae7-7d25-4b8a-be92-dc0f872d015e tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Task: {'id': task-2194712, 'name': PowerOffVM_Task, 'duration_secs': 0.215452} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.519943] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-55fd1ae7-7d25-4b8a-be92-dc0f872d015e tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 997.520153] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-55fd1ae7-7d25-4b8a-be92-dc0f872d015e tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 997.520902] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9f5bcb25-ae27-4738-a9e0-f350bba5a060 tempest-ListImageFiltersTestJSON-306267884 tempest-ListImageFiltersTestJSON-306267884-project-member] Lock "cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.785s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 997.521884] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-25f169c3-2971-4815-a702-273037b15edd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.604547] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-55fd1ae7-7d25-4b8a-be92-dc0f872d015e tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 997.604712] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-55fd1ae7-7d25-4b8a-be92-dc0f872d015e tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 997.604878] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-55fd1ae7-7d25-4b8a-be92-dc0f872d015e tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Deleting the datastore file [datastore2] a397bce6-30c7-485d-81ab-88a8e021996c {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 997.605353] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9f22b739-ece9-44e5-9212-1c1acd40e813 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.616448] env[62627]: DEBUG oslo_vmware.api [None req-55fd1ae7-7d25-4b8a-be92-dc0f872d015e tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Waiting for the task: (returnval){ [ 997.616448] env[62627]: value = "task-2194715" [ 997.616448] env[62627]: _type = "Task" [ 997.616448] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.626591] env[62627]: DEBUG oslo_vmware.api [None req-55fd1ae7-7d25-4b8a-be92-dc0f872d015e tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Task: {'id': task-2194715, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.672505] env[62627]: DEBUG oslo_concurrency.lockutils [None req-798bf817-78f4-4d2b-a86e-1b269286aa0d tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 997.744887] env[62627]: DEBUG oslo_vmware.api [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Task: {'id': task-2194713, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.471025} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.745402] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore1] 493e7ea0-b160-4cd0-ae0b-bc46e4d89217/493e7ea0-b160-4cd0-ae0b-bc46e4d89217.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 997.745832] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 493e7ea0-b160-4cd0-ae0b-bc46e4d89217] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 997.745899] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0382f9bb-ab36-46c7-8abd-0702a0a4b786 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.756986] env[62627]: DEBUG oslo_vmware.api [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Waiting for the task: (returnval){ [ 997.756986] env[62627]: value = "task-2194716" [ 997.756986] env[62627]: _type = "Task" [ 997.756986] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.768287] env[62627]: DEBUG oslo_vmware.api [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Task: {'id': task-2194716, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.928230] env[62627]: DEBUG oslo_concurrency.lockutils [None req-373561da-fdef-442a-9eb3-d20cfdbb24c1 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 998.129604] env[62627]: DEBUG oslo_vmware.api [None req-55fd1ae7-7d25-4b8a-be92-dc0f872d015e tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Task: {'id': task-2194715, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.20689} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.130330] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-55fd1ae7-7d25-4b8a-be92-dc0f872d015e tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 998.130330] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-55fd1ae7-7d25-4b8a-be92-dc0f872d015e tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 998.130330] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-55fd1ae7-7d25-4b8a-be92-dc0f872d015e tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 998.130464] env[62627]: INFO nova.compute.manager [None req-55fd1ae7-7d25-4b8a-be92-dc0f872d015e tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Took 1.16 seconds to destroy the instance on the hypervisor. [ 998.130660] env[62627]: DEBUG oslo.service.loopingcall [None req-55fd1ae7-7d25-4b8a-be92-dc0f872d015e tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 998.130917] env[62627]: DEBUG nova.compute.manager [-] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 998.130963] env[62627]: DEBUG nova.network.neutron [-] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 998.267268] env[62627]: DEBUG oslo_vmware.api [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Task: {'id': task-2194716, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.104015} completed successfully. 
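The ExtendVirtualDisk_Task that just completed grew the root disk copied from the image cache to the flavor's root size expressed in KiB: the earlier "Extending root virtual disk to 1048576" value corresponds to a root_gb of 1, since 1 GB is 1048576 KiB here. A one-line sketch of that conversion (the helper name is illustrative, not Nova's):

# The extend step sizes the disk in KiB: root_gb * 1024 * 1024.
def root_gb_to_kb(root_gb: int) -> int:
    return root_gb * 1024 * 1024

assert root_gb_to_kb(1) == 1048576  # matches "Extending root virtual disk to 1048576"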
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.270526] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 493e7ea0-b160-4cd0-ae0b-bc46e4d89217] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 998.272030] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d07d0dc2-076e-4706-8437-7fc08171bcd6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.300854] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 493e7ea0-b160-4cd0-ae0b-bc46e4d89217] Reconfiguring VM instance instance-00000042 to attach disk [datastore1] 493e7ea0-b160-4cd0-ae0b-bc46e4d89217/493e7ea0-b160-4cd0-ae0b-bc46e4d89217.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 998.303835] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1c271112-93ae-4be7-aa21-9cd7e0c28a5e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.325597] env[62627]: DEBUG oslo_vmware.api [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Waiting for the task: (returnval){ [ 998.325597] env[62627]: value = "task-2194717" [ 998.325597] env[62627]: _type = "Task" [ 998.325597] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.339527] env[62627]: DEBUG oslo_vmware.api [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Task: {'id': task-2194717, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.395604] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc419b01-a79b-4996-83fd-c4dc8d84f6d1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.403928] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a06c2c71-8e0e-498c-a68b-6947ddb73023 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.440556] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52decb33-c83e-4c5c-8070-876f4d0f6eb4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.449842] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0518f38c-0622-4721-8df2-a0b78f153dac {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.466052] env[62627]: DEBUG nova.compute.provider_tree [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 998.842274] env[62627]: DEBUG oslo_vmware.api [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Task: {'id': task-2194717, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.969634] env[62627]: DEBUG nova.scheduler.client.report [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 999.006214] env[62627]: DEBUG nova.compute.manager [req-e9857c97-294a-484a-a330-359484dd4831 req-19b717ed-6bf3-458f-bfb3-2800c95f3022 service nova] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Received event network-vif-deleted-e56ec5b3-09ba-4f3a-9238-ed7d6c1f7991 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 999.006707] env[62627]: INFO nova.compute.manager [req-e9857c97-294a-484a-a330-359484dd4831 req-19b717ed-6bf3-458f-bfb3-2800c95f3022 service nova] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Neutron deleted interface e56ec5b3-09ba-4f3a-9238-ed7d6c1f7991; detaching it from the instance and deleting it from the info cache [ 999.006910] env[62627]: DEBUG nova.network.neutron [req-e9857c97-294a-484a-a330-359484dd4831 req-19b717ed-6bf3-458f-bfb3-2800c95f3022 service nova] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 999.339828] env[62627]: DEBUG oslo_vmware.api [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Task: {'id': task-2194717, 'name': ReconfigVM_Task, 'duration_secs': 0.902525} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.340206] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 493e7ea0-b160-4cd0-ae0b-bc46e4d89217] Reconfigured VM instance instance-00000042 to attach disk [datastore1] 493e7ea0-b160-4cd0-ae0b-bc46e4d89217/493e7ea0-b160-4cd0-ae0b-bc46e4d89217.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 999.340931] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-65572886-6337-47f7-bc51-a60a51e3fa1e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.348934] env[62627]: DEBUG oslo_vmware.api [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Waiting for the task: (returnval){ [ 999.348934] env[62627]: value = "task-2194718" [ 999.348934] env[62627]: _type = "Task" [ 999.348934] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.358639] env[62627]: DEBUG oslo_vmware.api [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Task: {'id': task-2194718, 'name': Rename_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.469322] env[62627]: DEBUG nova.network.neutron [-] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 999.478314] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.490s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 999.478831] env[62627]: DEBUG nova.compute.manager [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] [instance: 9adb2baf-315f-4122-a657-cb99757002ba] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 999.481439] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 26.814s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 999.510219] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cda3fc2d-8d14-4f43-be93-e5c95df887b1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.528036] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd6e1fb0-d2ab-414a-afa3-948cd0236867 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.571440] env[62627]: DEBUG nova.compute.manager [req-e9857c97-294a-484a-a330-359484dd4831 req-19b717ed-6bf3-458f-bfb3-2800c95f3022 service nova] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Detach interface failed, port_id=e56ec5b3-09ba-4f3a-9238-ed7d6c1f7991, reason: Instance a397bce6-30c7-485d-81ab-88a8e021996c could not be found. {{(pid=62627) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 999.860073] env[62627]: DEBUG oslo_vmware.api [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Task: {'id': task-2194718, 'name': Rename_Task, 'duration_secs': 0.156461} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.860358] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 493e7ea0-b160-4cd0-ae0b-bc46e4d89217] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 999.860503] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8e26db09-d8c5-4577-87e7-e90b0c53a08f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.868922] env[62627]: DEBUG oslo_vmware.api [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Waiting for the task: (returnval){ [ 999.868922] env[62627]: value = "task-2194719" [ 999.868922] env[62627]: _type = "Task" [ 999.868922] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.879850] env[62627]: DEBUG oslo_vmware.api [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Task: {'id': task-2194719, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.972730] env[62627]: INFO nova.compute.manager [-] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Took 1.84 seconds to deallocate network for instance. [ 999.993640] env[62627]: DEBUG nova.compute.utils [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 999.995113] env[62627]: DEBUG nova.compute.manager [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] [instance: 9adb2baf-315f-4122-a657-cb99757002ba] Allocating IP information in the background. 
{{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 999.995395] env[62627]: DEBUG nova.network.neutron [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] [instance: 9adb2baf-315f-4122-a657-cb99757002ba] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1000.062512] env[62627]: DEBUG nova.policy [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '17e81860aeac405389936451e18681f5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd8bbc8b42b7b40f8aff57e0f5d9340b9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 1000.363709] env[62627]: DEBUG oslo_concurrency.lockutils [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Acquiring lock "58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1000.363951] env[62627]: DEBUG oslo_concurrency.lockutils [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Lock "58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1000.382548] env[62627]: DEBUG oslo_vmware.api [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Task: {'id': task-2194719, 'name': PowerOnVM_Task, 'duration_secs': 0.506448} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.384083] env[62627]: DEBUG nova.network.neutron [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] [instance: 9adb2baf-315f-4122-a657-cb99757002ba] Successfully created port: 85488276-f200-4480-ace3-01452fa9a93f {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1000.386066] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 493e7ea0-b160-4cd0-ae0b-bc46e4d89217] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1000.386319] env[62627]: INFO nova.compute.manager [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 493e7ea0-b160-4cd0-ae0b-bc46e4d89217] Took 11.42 seconds to spawn the instance on the hypervisor. [ 1000.386513] env[62627]: DEBUG nova.compute.manager [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 493e7ea0-b160-4cd0-ae0b-bc46e4d89217] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1000.387799] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d72cf452-a681-4dc5-b04a-a2ae3de94723 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.480399] env[62627]: DEBUG oslo_concurrency.lockutils [None req-55fd1ae7-7d25-4b8a-be92-dc0f872d015e tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1000.500294] env[62627]: DEBUG nova.compute.manager [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] [instance: 9adb2baf-315f-4122-a657-cb99757002ba] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1000.525885] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 7366e4b0-bdba-435d-844d-8e81c4464b31 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1000.526068] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance c1fd6afb-9332-446d-a441-5942b60edc94 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1000.526238] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance a397bce6-30c7-485d-81ab-88a8e021996c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1000.526368] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance ab999825-4310-4fd8-81f0-6bb5dde4175b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1000.526514] env[62627]: WARNING nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance b1ba58c1-340f-486a-8f47-86fbc7b27485 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1000.526630] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 241311b0-ba81-41d1-a9b6-cd40d04dbe3c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1000.526820] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance aa181094-cb78-44e0-8d7a-1d677e0af1fe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1000.526870] env[62627]: WARNING nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 1129349d-65db-4e1d-bb08-cea793adf996 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1000.526975] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1000.527200] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 457e5aa6-d229-4f48-8817-e918ec4bd0f9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1000.527365] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 66882603-6ff6-49c2-9f42-3521ecaf98b2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1000.527525] env[62627]: WARNING nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 892fc84b-02a9-4fa0-81b5-80326a060c64 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1000.527667] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance ae4ec050-5062-4e57-8090-f67f8c8013ff actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1000.527805] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1000.527942] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 84b8381c-bdd4-462e-aa98-0cc972f00882 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1000.528094] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance f6357f01-d7cb-4477-be2d-98fc17d7a940 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1000.529161] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1000.529161] env[62627]: WARNING nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 0de040e4-8896-4f02-9284-f68100254631 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
[ 1000.529161] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance e9d33611-e373-4303-937d-b43d893e51f5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1000.529161] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 493e7ea0-b160-4cd0-ae0b-bc46e4d89217 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1000.529161] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 9adb2baf-315f-4122-a657-cb99757002ba actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1000.866587] env[62627]: DEBUG nova.compute.manager [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] [instance: 58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1000.911875] env[62627]: INFO nova.compute.manager [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 493e7ea0-b160-4cd0-ae0b-bc46e4d89217] Took 49.03 seconds to build instance. [ 1001.032323] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 07b08283-cbe5-4c36-9127-c423b259643d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1001.401683] env[62627]: DEBUG oslo_concurrency.lockutils [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1001.414194] env[62627]: DEBUG oslo_concurrency.lockutils [None req-401141a2-aece-4efb-b266-5ea3062df309 tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Lock "493e7ea0-b160-4cd0-ae0b-bc46e4d89217" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 50.543s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1001.516986] env[62627]: DEBUG nova.compute.manager [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] [instance: 9adb2baf-315f-4122-a657-cb99757002ba] Start spawning the instance on the hypervisor. {{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1001.535982] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 7184a7d2-72c6-48f2-b125-880f1f9858c7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1001.546010] env[62627]: DEBUG nova.virt.hardware [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1001.546288] env[62627]: DEBUG nova.virt.hardware [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1001.546466] env[62627]: DEBUG nova.virt.hardware [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1001.546657] env[62627]: DEBUG nova.virt.hardware [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1001.546805] env[62627]: DEBUG nova.virt.hardware [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1001.546952] env[62627]: DEBUG nova.virt.hardware [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1001.547174] env[62627]: DEBUG nova.virt.hardware [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1001.547344] env[62627]: DEBUG nova.virt.hardware [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 
tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1001.547507] env[62627]: DEBUG nova.virt.hardware [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1001.547668] env[62627]: DEBUG nova.virt.hardware [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1001.547840] env[62627]: DEBUG nova.virt.hardware [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1001.548699] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dbaaedc-900a-49de-b4d4-d4de2aa04f64 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.557999] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b39ea0c7-a2f7-4e45-8cce-363c92bd38ba {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.803801] env[62627]: DEBUG nova.compute.manager [req-6f22f7b6-e2b8-4629-98a7-d7934e8c62cd req-662c1a94-de50-48c6-8da3-da0238e8dc24 service nova] [instance: 9adb2baf-315f-4122-a657-cb99757002ba] Received event network-vif-plugged-85488276-f200-4480-ace3-01452fa9a93f {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1001.803801] env[62627]: DEBUG oslo_concurrency.lockutils [req-6f22f7b6-e2b8-4629-98a7-d7934e8c62cd req-662c1a94-de50-48c6-8da3-da0238e8dc24 service nova] Acquiring lock "9adb2baf-315f-4122-a657-cb99757002ba-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1001.803801] env[62627]: DEBUG oslo_concurrency.lockutils [req-6f22f7b6-e2b8-4629-98a7-d7934e8c62cd req-662c1a94-de50-48c6-8da3-da0238e8dc24 service nova] Lock "9adb2baf-315f-4122-a657-cb99757002ba-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1001.803801] env[62627]: DEBUG oslo_concurrency.lockutils [req-6f22f7b6-e2b8-4629-98a7-d7934e8c62cd req-662c1a94-de50-48c6-8da3-da0238e8dc24 service nova] Lock "9adb2baf-315f-4122-a657-cb99757002ba-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1001.804390] env[62627]: DEBUG nova.compute.manager [req-6f22f7b6-e2b8-4629-98a7-d7934e8c62cd req-662c1a94-de50-48c6-8da3-da0238e8dc24 service 
nova] [instance: 9adb2baf-315f-4122-a657-cb99757002ba] No waiting events found dispatching network-vif-plugged-85488276-f200-4480-ace3-01452fa9a93f {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1001.804711] env[62627]: WARNING nova.compute.manager [req-6f22f7b6-e2b8-4629-98a7-d7934e8c62cd req-662c1a94-de50-48c6-8da3-da0238e8dc24 service nova] [instance: 9adb2baf-315f-4122-a657-cb99757002ba] Received unexpected event network-vif-plugged-85488276-f200-4480-ace3-01452fa9a93f for instance with vm_state building and task_state spawning. [ 1001.890361] env[62627]: DEBUG oslo_concurrency.lockutils [None req-eb3833de-d679-49df-8c4f-61fa547f46fd tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Acquiring lock "493e7ea0-b160-4cd0-ae0b-bc46e4d89217" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1001.890361] env[62627]: DEBUG oslo_concurrency.lockutils [None req-eb3833de-d679-49df-8c4f-61fa547f46fd tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Lock "493e7ea0-b160-4cd0-ae0b-bc46e4d89217" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1001.890361] env[62627]: DEBUG oslo_concurrency.lockutils [None req-eb3833de-d679-49df-8c4f-61fa547f46fd tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Acquiring lock "493e7ea0-b160-4cd0-ae0b-bc46e4d89217-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1001.890361] env[62627]: DEBUG oslo_concurrency.lockutils [None req-eb3833de-d679-49df-8c4f-61fa547f46fd tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Lock "493e7ea0-b160-4cd0-ae0b-bc46e4d89217-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1001.890361] env[62627]: DEBUG oslo_concurrency.lockutils [None req-eb3833de-d679-49df-8c4f-61fa547f46fd tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Lock "493e7ea0-b160-4cd0-ae0b-bc46e4d89217-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1001.891373] env[62627]: INFO nova.compute.manager [None req-eb3833de-d679-49df-8c4f-61fa547f46fd tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 493e7ea0-b160-4cd0-ae0b-bc46e4d89217] Terminating instance [ 1001.907882] env[62627]: DEBUG nova.network.neutron [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] [instance: 9adb2baf-315f-4122-a657-cb99757002ba] Successfully updated port: 85488276-f200-4480-ace3-01452fa9a93f {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1002.041884] env[62627]: DEBUG 
nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1002.041884] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Total usable vcpus: 48, total allocated vcpus: 17 {{(pid=62627) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1002.042034] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3840MB phys_disk=200GB used_disk=17GB total_vcpus=48 used_vcpus=17 pci_stats=[] {{(pid=62627) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1002.287849] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d271ec5-a1ab-48fb-97a2-c1680c7d7e3c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.296281] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-404a5d86-965a-4be0-acae-442304092570 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.328286] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49609eaf-233b-46c2-b903-149324b927dd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.336176] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ca96fd3-c1a2-45fa-b011-14d8ae2b32a7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.350160] env[62627]: DEBUG nova.compute.provider_tree [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1002.394738] env[62627]: DEBUG nova.compute.manager [None req-eb3833de-d679-49df-8c4f-61fa547f46fd tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 493e7ea0-b160-4cd0-ae0b-bc46e4d89217] Start destroying the instance on the hypervisor. 
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1002.394968] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-eb3833de-d679-49df-8c4f-61fa547f46fd tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 493e7ea0-b160-4cd0-ae0b-bc46e4d89217] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1002.396530] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f6fef32-cb73-4b76-9a88-8f419d296ade {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.404970] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb3833de-d679-49df-8c4f-61fa547f46fd tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 493e7ea0-b160-4cd0-ae0b-bc46e4d89217] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1002.405269] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-312a9c78-66f1-42d2-bd85-ad5b2e8ee09a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.409284] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Acquiring lock "refresh_cache-9adb2baf-315f-4122-a657-cb99757002ba" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1002.409396] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Acquired lock "refresh_cache-9adb2baf-315f-4122-a657-cb99757002ba" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1002.409572] env[62627]: DEBUG nova.network.neutron [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] [instance: 9adb2baf-315f-4122-a657-cb99757002ba] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1002.412385] env[62627]: DEBUG oslo_vmware.api [None req-eb3833de-d679-49df-8c4f-61fa547f46fd tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Waiting for the task: (returnval){ [ 1002.412385] env[62627]: value = "task-2194720" [ 1002.412385] env[62627]: _type = "Task" [ 1002.412385] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.421084] env[62627]: DEBUG oslo_vmware.api [None req-eb3833de-d679-49df-8c4f-61fa547f46fd tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Task: {'id': task-2194720, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.854897] env[62627]: DEBUG nova.scheduler.client.report [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1002.923385] env[62627]: DEBUG oslo_vmware.api [None req-eb3833de-d679-49df-8c4f-61fa547f46fd tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Task: {'id': task-2194720, 'name': PowerOffVM_Task, 'duration_secs': 0.338465} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.923694] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb3833de-d679-49df-8c4f-61fa547f46fd tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 493e7ea0-b160-4cd0-ae0b-bc46e4d89217] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1002.923835] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-eb3833de-d679-49df-8c4f-61fa547f46fd tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 493e7ea0-b160-4cd0-ae0b-bc46e4d89217] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1002.924079] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1ef9ffec-746d-4cc1-81f1-bf755198da78 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.950758] env[62627]: DEBUG nova.network.neutron [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] [instance: 9adb2baf-315f-4122-a657-cb99757002ba] Instance cache missing network info. 
{{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1003.080052] env[62627]: DEBUG nova.network.neutron [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] [instance: 9adb2baf-315f-4122-a657-cb99757002ba] Updating instance_info_cache with network_info: [{"id": "85488276-f200-4480-ace3-01452fa9a93f", "address": "fa:16:3e:bb:66:62", "network": {"id": "bb9e36ff-b937-43e9-b4a0-16e4905379f9", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1535008076-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d8bbc8b42b7b40f8aff57e0f5d9340b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74e6f6e0-95e6-4531-99e9-0e78350fb655", "external-id": "nsx-vlan-transportzone-896", "segmentation_id": 896, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap85488276-f2", "ovs_interfaceid": "85488276-f200-4480-ace3-01452fa9a93f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1003.360408] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62627) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1003.360581] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.879s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1003.360876] env[62627]: DEBUG oslo_concurrency.lockutils [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.846s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1003.362420] env[62627]: INFO nova.compute.claims [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1003.365115] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1003.365275] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Cleaning up deleted instances {{(pid=62627) 
_run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11745}} [ 1003.583018] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Releasing lock "refresh_cache-9adb2baf-315f-4122-a657-cb99757002ba" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1003.583367] env[62627]: DEBUG nova.compute.manager [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] [instance: 9adb2baf-315f-4122-a657-cb99757002ba] Instance network_info: |[{"id": "85488276-f200-4480-ace3-01452fa9a93f", "address": "fa:16:3e:bb:66:62", "network": {"id": "bb9e36ff-b937-43e9-b4a0-16e4905379f9", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1535008076-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d8bbc8b42b7b40f8aff57e0f5d9340b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74e6f6e0-95e6-4531-99e9-0e78350fb655", "external-id": "nsx-vlan-transportzone-896", "segmentation_id": 896, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap85488276-f2", "ovs_interfaceid": "85488276-f200-4480-ace3-01452fa9a93f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1003.583807] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] [instance: 9adb2baf-315f-4122-a657-cb99757002ba] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bb:66:62', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '74e6f6e0-95e6-4531-99e9-0e78350fb655', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '85488276-f200-4480-ace3-01452fa9a93f', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1003.592174] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Creating folder: Project (d8bbc8b42b7b40f8aff57e0f5d9340b9). Parent ref: group-v447541. 
{{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1003.592876] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-238d5643-3d37-4e9c-98bd-f23acb1dc935 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.608226] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Created folder: Project (d8bbc8b42b7b40f8aff57e0f5d9340b9) in parent group-v447541. [ 1003.608484] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Creating folder: Instances. Parent ref: group-v447725. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1003.608818] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-eccec65b-0a26-4ee3-b647-b3a387782144 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.621623] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Created folder: Instances in parent group-v447725. [ 1003.621886] env[62627]: DEBUG oslo.service.loopingcall [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1003.622104] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9adb2baf-315f-4122-a657-cb99757002ba] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1003.622319] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6744bffd-2f40-480a-8fa9-4b0a1b9e0b10 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.644205] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1003.644205] env[62627]: value = "task-2194724" [ 1003.644205] env[62627]: _type = "Task" [ 1003.644205] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.653083] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194724, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.837100] env[62627]: DEBUG nova.compute.manager [req-6b803f09-d3f0-470b-8f23-0dd0550a842d req-f04c28da-b73e-42be-b322-db32d31e9022 service nova] [instance: 9adb2baf-315f-4122-a657-cb99757002ba] Received event network-changed-85488276-f200-4480-ace3-01452fa9a93f {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1003.837423] env[62627]: DEBUG nova.compute.manager [req-6b803f09-d3f0-470b-8f23-0dd0550a842d req-f04c28da-b73e-42be-b322-db32d31e9022 service nova] [instance: 9adb2baf-315f-4122-a657-cb99757002ba] Refreshing instance network info cache due to event network-changed-85488276-f200-4480-ace3-01452fa9a93f. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1003.837612] env[62627]: DEBUG oslo_concurrency.lockutils [req-6b803f09-d3f0-470b-8f23-0dd0550a842d req-f04c28da-b73e-42be-b322-db32d31e9022 service nova] Acquiring lock "refresh_cache-9adb2baf-315f-4122-a657-cb99757002ba" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1003.837667] env[62627]: DEBUG oslo_concurrency.lockutils [req-6b803f09-d3f0-470b-8f23-0dd0550a842d req-f04c28da-b73e-42be-b322-db32d31e9022 service nova] Acquired lock "refresh_cache-9adb2baf-315f-4122-a657-cb99757002ba" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1003.837835] env[62627]: DEBUG nova.network.neutron [req-6b803f09-d3f0-470b-8f23-0dd0550a842d req-f04c28da-b73e-42be-b322-db32d31e9022 service nova] [instance: 9adb2baf-315f-4122-a657-cb99757002ba] Refreshing network info cache for port 85488276-f200-4480-ace3-01452fa9a93f {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1003.883711] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] There are 45 instances to clean {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11754}} [ 1003.884078] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 8ab6e3c6-38f7-41ee-b2e5-1c07b442d61a] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1003.924148] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-eb3833de-d679-49df-8c4f-61fa547f46fd tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 493e7ea0-b160-4cd0-ae0b-bc46e4d89217] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1003.924647] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-eb3833de-d679-49df-8c4f-61fa547f46fd tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 493e7ea0-b160-4cd0-ae0b-bc46e4d89217] Deleting contents of the VM from datastore datastore1 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1003.924756] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb3833de-d679-49df-8c4f-61fa547f46fd tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Deleting the datastore file [datastore1] 493e7ea0-b160-4cd0-ae0b-bc46e4d89217 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1003.925997] env[62627]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ed4055e2-0f9b-4a33-ab97-7f1e6ff39e6b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.935434] env[62627]: DEBUG oslo_vmware.api [None req-eb3833de-d679-49df-8c4f-61fa547f46fd tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Waiting for the task: (returnval){ [ 1003.935434] env[62627]: value = "task-2194725" [ 1003.935434] env[62627]: _type = "Task" [ 1003.935434] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.946480] env[62627]: DEBUG oslo_vmware.api [None req-eb3833de-d679-49df-8c4f-61fa547f46fd tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Task: {'id': task-2194725, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.155538] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194724, 'name': CreateVM_Task, 'duration_secs': 0.360075} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.155770] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9adb2baf-315f-4122-a657-cb99757002ba] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1004.156429] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1004.156598] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1004.156926] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1004.157201] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-924bb23e-e260-4542-be1f-fed72e296fc1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.162486] env[62627]: DEBUG oslo_vmware.api [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Waiting for the task: (returnval){ [ 1004.162486] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]521ebead-bcd4-09e8-e457-f3391ecbbb90" [ 1004.162486] env[62627]: _type = "Task" [ 1004.162486] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.171522] env[62627]: DEBUG oslo_vmware.api [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]521ebead-bcd4-09e8-e457-f3391ecbbb90, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.390526] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 5d288606-02b6-4d11-b285-7325e48fb5b3] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1004.450483] env[62627]: DEBUG oslo_vmware.api [None req-eb3833de-d679-49df-8c4f-61fa547f46fd tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Task: {'id': task-2194725, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.168861} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.452924] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb3833de-d679-49df-8c4f-61fa547f46fd tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1004.453146] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-eb3833de-d679-49df-8c4f-61fa547f46fd tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 493e7ea0-b160-4cd0-ae0b-bc46e4d89217] Deleted contents of the VM from datastore datastore1 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1004.453534] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-eb3833de-d679-49df-8c4f-61fa547f46fd tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 493e7ea0-b160-4cd0-ae0b-bc46e4d89217] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1004.453703] env[62627]: INFO nova.compute.manager [None req-eb3833de-d679-49df-8c4f-61fa547f46fd tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] [instance: 493e7ea0-b160-4cd0-ae0b-bc46e4d89217] Took 2.06 seconds to destroy the instance on the hypervisor. [ 1004.454586] env[62627]: DEBUG oslo.service.loopingcall [None req-eb3833de-d679-49df-8c4f-61fa547f46fd tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1004.454732] env[62627]: DEBUG nova.compute.manager [-] [instance: 493e7ea0-b160-4cd0-ae0b-bc46e4d89217] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1004.454876] env[62627]: DEBUG nova.network.neutron [-] [instance: 493e7ea0-b160-4cd0-ae0b-bc46e4d89217] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1004.613265] env[62627]: DEBUG nova.network.neutron [req-6b803f09-d3f0-470b-8f23-0dd0550a842d req-f04c28da-b73e-42be-b322-db32d31e9022 service nova] [instance: 9adb2baf-315f-4122-a657-cb99757002ba] Updated VIF entry in instance network info cache for port 85488276-f200-4480-ace3-01452fa9a93f. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1004.614045] env[62627]: DEBUG nova.network.neutron [req-6b803f09-d3f0-470b-8f23-0dd0550a842d req-f04c28da-b73e-42be-b322-db32d31e9022 service nova] [instance: 9adb2baf-315f-4122-a657-cb99757002ba] Updating instance_info_cache with network_info: [{"id": "85488276-f200-4480-ace3-01452fa9a93f", "address": "fa:16:3e:bb:66:62", "network": {"id": "bb9e36ff-b937-43e9-b4a0-16e4905379f9", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1535008076-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d8bbc8b42b7b40f8aff57e0f5d9340b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74e6f6e0-95e6-4531-99e9-0e78350fb655", "external-id": "nsx-vlan-transportzone-896", "segmentation_id": 896, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap85488276-f2", "ovs_interfaceid": "85488276-f200-4480-ace3-01452fa9a93f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1004.678255] env[62627]: DEBUG oslo_vmware.api [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]521ebead-bcd4-09e8-e457-f3391ecbbb90, 'name': SearchDatastore_Task, 'duration_secs': 0.01069} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.681069] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1004.681323] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] [instance: 9adb2baf-315f-4122-a657-cb99757002ba] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1004.681568] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1004.681883] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1004.681883] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1004.682378] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3d97a166-eab9-49f7-9b96-cd1920e1de62 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.692995] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1004.693225] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1004.696715] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4b47e939-0761-411a-ac7c-d5abd0827a23 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.704075] env[62627]: DEBUG oslo_vmware.api [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Waiting for the task: (returnval){ [ 1004.704075] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52b1e2cd-3c1f-842d-172d-7c74b6ce30a3" [ 1004.704075] env[62627]: _type = "Task" [ 1004.704075] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.715906] env[62627]: DEBUG oslo_vmware.api [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52b1e2cd-3c1f-842d-172d-7c74b6ce30a3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.751551] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7420c599-e258-4eb6-94fd-383bb6688485 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.761020] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b23fd1e5-e145-4398-85ff-fa264a68a0a0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.798243] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dd59abf-565a-40ed-a0a9-93782bb4c0fe {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.802043] env[62627]: DEBUG nova.compute.manager [req-2c1810fc-992e-4c80-85db-64dd66db8342 req-0194dc33-54df-45f5-98b8-bae56bb1810e service nova] [instance: 493e7ea0-b160-4cd0-ae0b-bc46e4d89217] Received event network-vif-deleted-2277dd85-8be7-4a0e-852b-451705959e88 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1004.802242] env[62627]: INFO nova.compute.manager [req-2c1810fc-992e-4c80-85db-64dd66db8342 req-0194dc33-54df-45f5-98b8-bae56bb1810e service nova] [instance: 493e7ea0-b160-4cd0-ae0b-bc46e4d89217] Neutron deleted interface 2277dd85-8be7-4a0e-852b-451705959e88; detaching it from the instance and deleting it from the info cache [ 1004.802491] env[62627]: DEBUG nova.network.neutron [req-2c1810fc-992e-4c80-85db-64dd66db8342 req-0194dc33-54df-45f5-98b8-bae56bb1810e service nova] [instance: 493e7ea0-b160-4cd0-ae0b-bc46e4d89217] Updating instance_info_cache with network_info: [{"id": "513e35e4-7ec3-4b36-82b0-b70d98706831", "address": "fa:16:3e:88:e9:7b", "network": {"id": "b37d9615-846a-4440-8895-dc51781f4d1e", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-886296704", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.83", "type": "fixed", "version": 
4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "ecf9c58481c14634b2315248118ed1b6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4fc0575-c8e3-4f3c-b2e1-e10ac2d0cc1a", "external-id": "nsx-vlan-transportzone-256", "segmentation_id": 256, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap513e35e4-7e", "ovs_interfaceid": "513e35e4-7ec3-4b36-82b0-b70d98706831", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1004.810676] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ba67922-b6db-42ea-88b5-c32e4f0b3254 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.825817] env[62627]: DEBUG nova.compute.provider_tree [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1004.895840] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: c560b907-500b-42fd-851b-61a04472b660] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1005.121866] env[62627]: DEBUG oslo_concurrency.lockutils [req-6b803f09-d3f0-470b-8f23-0dd0550a842d req-f04c28da-b73e-42be-b322-db32d31e9022 service nova] Releasing lock "refresh_cache-9adb2baf-315f-4122-a657-cb99757002ba" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1005.215011] env[62627]: DEBUG oslo_vmware.api [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52b1e2cd-3c1f-842d-172d-7c74b6ce30a3, 'name': SearchDatastore_Task, 'duration_secs': 0.010527} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.215931] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f79a9483-cf8c-498c-960d-b9d949aeb09f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.221794] env[62627]: DEBUG oslo_vmware.api [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Waiting for the task: (returnval){ [ 1005.221794] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d66a1b-71dc-01bf-b7f6-c862220c18a9" [ 1005.221794] env[62627]: _type = "Task" [ 1005.221794] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.230164] env[62627]: DEBUG oslo_vmware.api [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d66a1b-71dc-01bf-b7f6-c862220c18a9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.310656] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ed663901-b92e-4b99-ba26-94085889a735 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.321804] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee2401d9-bf4b-4272-a047-9ac9715e8677 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.333193] env[62627]: DEBUG nova.scheduler.client.report [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1005.361667] env[62627]: DEBUG nova.compute.manager [req-2c1810fc-992e-4c80-85db-64dd66db8342 req-0194dc33-54df-45f5-98b8-bae56bb1810e service nova] [instance: 493e7ea0-b160-4cd0-ae0b-bc46e4d89217] Detach interface failed, port_id=2277dd85-8be7-4a0e-852b-451705959e88, reason: Instance 493e7ea0-b160-4cd0-ae0b-bc46e4d89217 could not be found. {{(pid=62627) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1005.399562] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 4e65e39e-3b0b-4c71-87d7-e1ed663ada4a] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1005.526058] env[62627]: DEBUG nova.network.neutron [-] [instance: 493e7ea0-b160-4cd0-ae0b-bc46e4d89217] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1005.732809] env[62627]: DEBUG oslo_vmware.api [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d66a1b-71dc-01bf-b7f6-c862220c18a9, 'name': SearchDatastore_Task, 'duration_secs': 0.01022} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.733093] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1005.733352] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 9adb2baf-315f-4122-a657-cb99757002ba/9adb2baf-315f-4122-a657-cb99757002ba.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1005.733934] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b7fa400a-f7ab-453b-b5fd-b2f860cc9523 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.740428] env[62627]: DEBUG oslo_vmware.api [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Waiting for the task: (returnval){ [ 1005.740428] env[62627]: value = "task-2194726" [ 1005.740428] env[62627]: _type = "Task" [ 1005.740428] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.748314] env[62627]: DEBUG oslo_vmware.api [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Task: {'id': task-2194726, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.837986] env[62627]: DEBUG oslo_concurrency.lockutils [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.477s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1005.838517] env[62627]: DEBUG nova.compute.manager [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Start building networks asynchronously for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1005.841334] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e74315bb-ef07-4a11-b4d2-6fa8e5f27d04 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.872s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1005.841536] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e74315bb-ef07-4a11-b4d2-6fa8e5f27d04 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1005.843770] env[62627]: DEBUG oslo_concurrency.lockutils [None req-49e07426-1413-412e-b578-daf14e725031 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.693s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1005.843958] env[62627]: DEBUG oslo_concurrency.lockutils [None req-49e07426-1413-412e-b578-daf14e725031 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1005.845743] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.207s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1005.847192] env[62627]: INFO nova.compute.claims [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 7184a7d2-72c6-48f2-b125-880f1f9858c7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1005.871359] env[62627]: INFO nova.scheduler.client.report [None req-49e07426-1413-412e-b578-daf14e725031 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Deleted allocations for instance 1129349d-65db-4e1d-bb08-cea793adf996 [ 1005.876704] env[62627]: INFO nova.scheduler.client.report [None req-e74315bb-ef07-4a11-b4d2-6fa8e5f27d04 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Deleted allocations for instance b1ba58c1-340f-486a-8f47-86fbc7b27485 [ 1005.903263] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 38ab16cc-be4f-47b4-a818-ea864e853872] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1006.030986] env[62627]: INFO nova.compute.manager [-] [instance: 493e7ea0-b160-4cd0-ae0b-bc46e4d89217] Took 1.58 seconds to deallocate network for instance. 
[ 1006.251031] env[62627]: DEBUG oslo_vmware.api [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Task: {'id': task-2194726, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.457552} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.251305] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 9adb2baf-315f-4122-a657-cb99757002ba/9adb2baf-315f-4122-a657-cb99757002ba.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1006.251507] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] [instance: 9adb2baf-315f-4122-a657-cb99757002ba] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1006.251755] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1eb8973b-de72-48d9-87c9-3caf310740f1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.258641] env[62627]: DEBUG oslo_vmware.api [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Waiting for the task: (returnval){ [ 1006.258641] env[62627]: value = "task-2194727" [ 1006.258641] env[62627]: _type = "Task" [ 1006.258641] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.266750] env[62627]: DEBUG oslo_vmware.api [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Task: {'id': task-2194727, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.351883] env[62627]: DEBUG nova.compute.utils [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1006.355027] env[62627]: DEBUG nova.compute.manager [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Allocating IP information in the background. 
{{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1006.355220] env[62627]: DEBUG nova.network.neutron [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1006.386542] env[62627]: DEBUG oslo_concurrency.lockutils [None req-49e07426-1413-412e-b578-daf14e725031 tempest-ServerRescueTestJSON-1890721172 tempest-ServerRescueTestJSON-1890721172-project-member] Lock "1129349d-65db-4e1d-bb08-cea793adf996" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.201s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1006.387658] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e74315bb-ef07-4a11-b4d2-6fa8e5f27d04 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Lock "b1ba58c1-340f-486a-8f47-86fbc7b27485" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.774s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1006.403596] env[62627]: DEBUG nova.policy [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1253a87d708448aba8d97fcabe137853', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6c9263b20f714274a55b88a8bdbd5251', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 1006.405874] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: d0786233-087b-4000-8c37-fd5c55cc0ef4] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1006.538667] env[62627]: DEBUG oslo_concurrency.lockutils [None req-eb3833de-d679-49df-8c4f-61fa547f46fd tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1006.693858] env[62627]: DEBUG nova.network.neutron [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Successfully created port: 21eb745b-bf3f-4539-b50e-df8c6ff07be1 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1006.769942] env[62627]: DEBUG oslo_vmware.api [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Task: {'id': task-2194727, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063094} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.770566] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] [instance: 9adb2baf-315f-4122-a657-cb99757002ba] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1006.771525] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdbed63d-b7ae-463f-b338-607a4eacea61 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.800797] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] [instance: 9adb2baf-315f-4122-a657-cb99757002ba] Reconfiguring VM instance instance-00000043 to attach disk [datastore2] 9adb2baf-315f-4122-a657-cb99757002ba/9adb2baf-315f-4122-a657-cb99757002ba.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1006.801114] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5302e5bf-eb8f-43d7-bfb1-a62687032b0a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.820724] env[62627]: DEBUG nova.compute.manager [req-f1b22c88-4c5c-4110-8fbe-6542e153591b req-bf00d5a4-43de-4816-a70c-d21636d6cd21 service nova] [instance: 493e7ea0-b160-4cd0-ae0b-bc46e4d89217] Received event network-vif-deleted-513e35e4-7ec3-4b36-82b0-b70d98706831 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1006.822653] env[62627]: DEBUG oslo_vmware.api [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Waiting for the task: (returnval){ [ 1006.822653] env[62627]: value = "task-2194728" [ 1006.822653] env[62627]: _type = "Task" [ 1006.822653] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.831900] env[62627]: DEBUG oslo_vmware.api [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Task: {'id': task-2194728, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.858675] env[62627]: DEBUG nova.compute.manager [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Start building block device mappings for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1006.908605] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 7804914c-7df8-45c2-a934-67aa73afb2e2] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1007.196990] env[62627]: DEBUG oslo_concurrency.lockutils [None req-20c04bbf-d22d-4883-bb4d-6e14ab399e3e tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Acquiring lock "c1fd6afb-9332-446d-a441-5942b60edc94" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1007.197160] env[62627]: DEBUG oslo_concurrency.lockutils [None req-20c04bbf-d22d-4883-bb4d-6e14ab399e3e tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Lock "c1fd6afb-9332-446d-a441-5942b60edc94" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1007.197361] env[62627]: DEBUG oslo_concurrency.lockutils [None req-20c04bbf-d22d-4883-bb4d-6e14ab399e3e tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Acquiring lock "c1fd6afb-9332-446d-a441-5942b60edc94-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1007.197559] env[62627]: DEBUG oslo_concurrency.lockutils [None req-20c04bbf-d22d-4883-bb4d-6e14ab399e3e tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Lock "c1fd6afb-9332-446d-a441-5942b60edc94-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1007.197750] env[62627]: DEBUG oslo_concurrency.lockutils [None req-20c04bbf-d22d-4883-bb4d-6e14ab399e3e tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Lock "c1fd6afb-9332-446d-a441-5942b60edc94-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1007.200276] env[62627]: INFO nova.compute.manager [None req-20c04bbf-d22d-4883-bb4d-6e14ab399e3e tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: c1fd6afb-9332-446d-a441-5942b60edc94] Terminating instance [ 1007.235627] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0fdf80a-fd07-48ea-aea1-1541998ffc1b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.243999] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33329c08-625a-4746-89bb-9c941b58e60c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.275535] env[62627]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c16f77e2-1b48-4d3a-8667-79c8e1ec4d7d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.283553] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19631ae8-87c0-4b85-a9a4-68d97d26fcb9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.298963] env[62627]: DEBUG nova.compute.provider_tree [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1007.333831] env[62627]: DEBUG oslo_vmware.api [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Task: {'id': task-2194728, 'name': ReconfigVM_Task, 'duration_secs': 0.345628} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.334127] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] [instance: 9adb2baf-315f-4122-a657-cb99757002ba] Reconfigured VM instance instance-00000043 to attach disk [datastore2] 9adb2baf-315f-4122-a657-cb99757002ba/9adb2baf-315f-4122-a657-cb99757002ba.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1007.334777] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3f03b89d-f45e-4832-8bd6-7980b91247c6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.342322] env[62627]: DEBUG oslo_vmware.api [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Waiting for the task: (returnval){ [ 1007.342322] env[62627]: value = "task-2194729" [ 1007.342322] env[62627]: _type = "Task" [ 1007.342322] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.350745] env[62627]: DEBUG oslo_vmware.api [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Task: {'id': task-2194729, 'name': Rename_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.410944] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: c9088ac9-456b-4c92-8773-79c263e7cc2a] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1007.707258] env[62627]: DEBUG nova.compute.manager [None req-20c04bbf-d22d-4883-bb4d-6e14ab399e3e tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: c1fd6afb-9332-446d-a441-5942b60edc94] Start destroying the instance on the hypervisor. 
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1007.707618] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-20c04bbf-d22d-4883-bb4d-6e14ab399e3e tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: c1fd6afb-9332-446d-a441-5942b60edc94] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1007.708457] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32e74fe7-2e5c-48a9-8923-222d52242847 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.718171] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-20c04bbf-d22d-4883-bb4d-6e14ab399e3e tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: c1fd6afb-9332-446d-a441-5942b60edc94] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1007.718426] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6756512d-370c-47e4-b812-526de64a092a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.726125] env[62627]: DEBUG oslo_vmware.api [None req-20c04bbf-d22d-4883-bb4d-6e14ab399e3e tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for the task: (returnval){ [ 1007.726125] env[62627]: value = "task-2194730" [ 1007.726125] env[62627]: _type = "Task" [ 1007.726125] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.735328] env[62627]: DEBUG oslo_vmware.api [None req-20c04bbf-d22d-4883-bb4d-6e14ab399e3e tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194730, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.802615] env[62627]: DEBUG nova.scheduler.client.report [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1007.853483] env[62627]: DEBUG oslo_vmware.api [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Task: {'id': task-2194729, 'name': Rename_Task, 'duration_secs': 0.15867} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.853768] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] [instance: 9adb2baf-315f-4122-a657-cb99757002ba] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1007.854018] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4b00d780-1f4c-421b-b0ad-d404fbf184a4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.861735] env[62627]: DEBUG oslo_vmware.api [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Waiting for the task: (returnval){ [ 1007.861735] env[62627]: value = "task-2194731" [ 1007.861735] env[62627]: _type = "Task" [ 1007.861735] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.872262] env[62627]: DEBUG nova.compute.manager [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Start spawning the instance on the hypervisor. {{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1007.874669] env[62627]: DEBUG oslo_vmware.api [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Task: {'id': task-2194731, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.905659] env[62627]: DEBUG nova.virt.hardware [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1007.906042] env[62627]: DEBUG nova.virt.hardware [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1007.906488] env[62627]: DEBUG nova.virt.hardware [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1007.906488] env[62627]: DEBUG nova.virt.hardware [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1007.906702] env[62627]: DEBUG nova.virt.hardware [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1007.906813] env[62627]: DEBUG nova.virt.hardware [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1007.907050] env[62627]: DEBUG nova.virt.hardware [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1007.907279] env[62627]: DEBUG nova.virt.hardware [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1007.907560] env[62627]: DEBUG 
nova.virt.hardware [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1007.907742] env[62627]: DEBUG nova.virt.hardware [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1007.907973] env[62627]: DEBUG nova.virt.hardware [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1007.908973] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ca37876-986f-4fd5-9bda-36e988559985 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.914273] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 87bc4af9-7cbc-4797-8aaa-cf5f8ba51218] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1007.921470] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-803ff03e-cbbe-4489-900e-70ff8b09fabd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.238858] env[62627]: DEBUG oslo_vmware.api [None req-20c04bbf-d22d-4883-bb4d-6e14ab399e3e tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194730, 'name': PowerOffVM_Task, 'duration_secs': 0.22216} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.239103] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-20c04bbf-d22d-4883-bb4d-6e14ab399e3e tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: c1fd6afb-9332-446d-a441-5942b60edc94] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1008.239297] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-20c04bbf-d22d-4883-bb4d-6e14ab399e3e tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: c1fd6afb-9332-446d-a441-5942b60edc94] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1008.239593] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ae6eb651-5c73-4c65-a7b3-3ec40f8b6cbe {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.312666] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.467s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1008.313682] env[62627]: DEBUG nova.compute.manager [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 7184a7d2-72c6-48f2-b125-880f1f9858c7] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1008.317766] env[62627]: DEBUG oslo_concurrency.lockutils [None req-798bf817-78f4-4d2b-a86e-1b269286aa0d tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.646s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1008.318036] env[62627]: DEBUG oslo_concurrency.lockutils [None req-798bf817-78f4-4d2b-a86e-1b269286aa0d tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1008.321337] env[62627]: DEBUG oslo_concurrency.lockutils [None req-373561da-fdef-442a-9eb3-d20cfdbb24c1 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.393s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1008.321540] env[62627]: DEBUG oslo_concurrency.lockutils [None req-373561da-fdef-442a-9eb3-d20cfdbb24c1 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62627) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1008.324052] env[62627]: DEBUG oslo_concurrency.lockutils [None req-55fd1ae7-7d25-4b8a-be92-dc0f872d015e tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.844s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1008.324271] env[62627]: DEBUG nova.objects.instance [None req-55fd1ae7-7d25-4b8a-be92-dc0f872d015e tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Lazy-loading 'resources' on Instance uuid a397bce6-30c7-485d-81ab-88a8e021996c {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1008.329030] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-20c04bbf-d22d-4883-bb4d-6e14ab399e3e tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: c1fd6afb-9332-446d-a441-5942b60edc94] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1008.329030] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-20c04bbf-d22d-4883-bb4d-6e14ab399e3e tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: c1fd6afb-9332-446d-a441-5942b60edc94] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1008.329030] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-20c04bbf-d22d-4883-bb4d-6e14ab399e3e tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Deleting the datastore file [datastore2] c1fd6afb-9332-446d-a441-5942b60edc94 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1008.329030] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-882f5384-6886-4f99-8ddf-1d4f15b00a54 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.336813] env[62627]: DEBUG oslo_vmware.api [None req-20c04bbf-d22d-4883-bb4d-6e14ab399e3e tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for the task: (returnval){ [ 1008.336813] env[62627]: value = "task-2194733" [ 1008.336813] env[62627]: _type = "Task" [ 1008.336813] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.346869] env[62627]: DEBUG oslo_vmware.api [None req-20c04bbf-d22d-4883-bb4d-6e14ab399e3e tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194733, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.358639] env[62627]: INFO nova.scheduler.client.report [None req-373561da-fdef-442a-9eb3-d20cfdbb24c1 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Deleted allocations for instance 892fc84b-02a9-4fa0-81b5-80326a060c64 [ 1008.363274] env[62627]: INFO nova.scheduler.client.report [None req-798bf817-78f4-4d2b-a86e-1b269286aa0d tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Deleted allocations for instance 0de040e4-8896-4f02-9284-f68100254631 [ 1008.379612] env[62627]: DEBUG oslo_vmware.api [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Task: {'id': task-2194731, 'name': PowerOnVM_Task, 'duration_secs': 0.500567} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.379612] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] [instance: 9adb2baf-315f-4122-a657-cb99757002ba] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1008.379862] env[62627]: INFO nova.compute.manager [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] [instance: 9adb2baf-315f-4122-a657-cb99757002ba] Took 6.86 seconds to spawn the instance on the hypervisor. 
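Editor's note: the "Waiting for the task ... to complete", "Task ... progress is 0%." and "completed successfully" entries above (DeleteDatastoreFile_Task, PowerOnVM_Task) are produced by oslo.vmware's task poller, which re-reads the vCenter task state on a fixed interval until it reaches a terminal state. The following is only an illustrative, standard-library sketch of that observable polling pattern; `fetch_task_info` and `TaskInfo` are hypothetical stand-ins, not the real oslo.vmware API.

```python
# Sketch of the fixed-interval task polling visible in the log above.
# The real logic lives in oslo_vmware/api.py (wait_for_task / _poll_task);
# fetch_task_info() is a hypothetical callable returning current task state.
import time
from dataclasses import dataclass

@dataclass
class TaskInfo:
    state: str        # "queued" | "running" | "success" | "error"
    progress: int     # 0-100
    error: str = ""

def wait_for_task(fetch_task_info, poll_interval=0.5, timeout=300.0):
    """Poll a task until it succeeds, fails, or the timeout expires."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = fetch_task_info()
        if info.state == "success":
            return info
        if info.state == "error":
            raise RuntimeError(f"task failed: {info.error}")
        print(f"Task progress is {info.progress}%.")
        time.sleep(poll_interval)
    raise TimeoutError("task did not complete in time")

# Example: succeed after two polls.
# states = iter([TaskInfo("running", 0), TaskInfo("running", 50), TaskInfo("success", 100)])
# wait_for_task(lambda: next(states), poll_interval=0.01)
```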
[ 1008.379961] env[62627]: DEBUG nova.compute.manager [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] [instance: 9adb2baf-315f-4122-a657-cb99757002ba] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1008.381045] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffcb9cdc-801f-43e2-8be7-2f670982c116 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.396016] env[62627]: DEBUG nova.network.neutron [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Successfully updated port: 21eb745b-bf3f-4539-b50e-df8c6ff07be1 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1008.421052] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: e76bdb57-4a30-4222-ad4c-d95e4e6193cb] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1008.829124] env[62627]: DEBUG nova.compute.utils [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1008.833080] env[62627]: DEBUG nova.compute.manager [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 7184a7d2-72c6-48f2-b125-880f1f9858c7] Allocating IP information in the background. {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1008.833872] env[62627]: DEBUG nova.network.neutron [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 7184a7d2-72c6-48f2-b125-880f1f9858c7] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1008.846725] env[62627]: DEBUG oslo_vmware.api [None req-20c04bbf-d22d-4883-bb4d-6e14ab399e3e tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194733, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.454195} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.847611] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-20c04bbf-d22d-4883-bb4d-6e14ab399e3e tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1008.847800] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-20c04bbf-d22d-4883-bb4d-6e14ab399e3e tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: c1fd6afb-9332-446d-a441-5942b60edc94] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1008.847978] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-20c04bbf-d22d-4883-bb4d-6e14ab399e3e tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: c1fd6afb-9332-446d-a441-5942b60edc94] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1008.848162] env[62627]: INFO nova.compute.manager [None req-20c04bbf-d22d-4883-bb4d-6e14ab399e3e tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: c1fd6afb-9332-446d-a441-5942b60edc94] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1008.848402] env[62627]: DEBUG oslo.service.loopingcall [None req-20c04bbf-d22d-4883-bb4d-6e14ab399e3e tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1008.848590] env[62627]: DEBUG nova.compute.manager [-] [instance: c1fd6afb-9332-446d-a441-5942b60edc94] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1008.848682] env[62627]: DEBUG nova.network.neutron [-] [instance: c1fd6afb-9332-446d-a441-5942b60edc94] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1008.877526] env[62627]: DEBUG oslo_concurrency.lockutils [None req-373561da-fdef-442a-9eb3-d20cfdbb24c1 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Lock "892fc84b-02a9-4fa0-81b5-80326a060c64" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.486s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1008.878481] env[62627]: DEBUG oslo_concurrency.lockutils [None req-798bf817-78f4-4d2b-a86e-1b269286aa0d tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Lock "0de040e4-8896-4f02-9284-f68100254631" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.204s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1008.892046] env[62627]: DEBUG nova.compute.manager [req-31e9f9bc-17d0-457b-b36a-916c42ca2da2 req-cc80b52e-94a8-4e0b-aa1b-0296dcea62be service nova] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Received event network-vif-plugged-21eb745b-bf3f-4539-b50e-df8c6ff07be1 {{(pid=62627) 
external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1008.892286] env[62627]: DEBUG oslo_concurrency.lockutils [req-31e9f9bc-17d0-457b-b36a-916c42ca2da2 req-cc80b52e-94a8-4e0b-aa1b-0296dcea62be service nova] Acquiring lock "07b08283-cbe5-4c36-9127-c423b259643d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1008.892562] env[62627]: DEBUG oslo_concurrency.lockutils [req-31e9f9bc-17d0-457b-b36a-916c42ca2da2 req-cc80b52e-94a8-4e0b-aa1b-0296dcea62be service nova] Lock "07b08283-cbe5-4c36-9127-c423b259643d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1008.892727] env[62627]: DEBUG oslo_concurrency.lockutils [req-31e9f9bc-17d0-457b-b36a-916c42ca2da2 req-cc80b52e-94a8-4e0b-aa1b-0296dcea62be service nova] Lock "07b08283-cbe5-4c36-9127-c423b259643d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1008.892847] env[62627]: DEBUG nova.compute.manager [req-31e9f9bc-17d0-457b-b36a-916c42ca2da2 req-cc80b52e-94a8-4e0b-aa1b-0296dcea62be service nova] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] No waiting events found dispatching network-vif-plugged-21eb745b-bf3f-4539-b50e-df8c6ff07be1 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1008.893042] env[62627]: WARNING nova.compute.manager [req-31e9f9bc-17d0-457b-b36a-916c42ca2da2 req-cc80b52e-94a8-4e0b-aa1b-0296dcea62be service nova] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Received unexpected event network-vif-plugged-21eb745b-bf3f-4539-b50e-df8c6ff07be1 for instance with vm_state building and task_state spawning. [ 1008.893400] env[62627]: DEBUG nova.compute.manager [req-31e9f9bc-17d0-457b-b36a-916c42ca2da2 req-cc80b52e-94a8-4e0b-aa1b-0296dcea62be service nova] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Received event network-changed-21eb745b-bf3f-4539-b50e-df8c6ff07be1 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1008.893537] env[62627]: DEBUG nova.compute.manager [req-31e9f9bc-17d0-457b-b36a-916c42ca2da2 req-cc80b52e-94a8-4e0b-aa1b-0296dcea62be service nova] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Refreshing instance network info cache due to event network-changed-21eb745b-bf3f-4539-b50e-df8c6ff07be1. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1008.893723] env[62627]: DEBUG oslo_concurrency.lockutils [req-31e9f9bc-17d0-457b-b36a-916c42ca2da2 req-cc80b52e-94a8-4e0b-aa1b-0296dcea62be service nova] Acquiring lock "refresh_cache-07b08283-cbe5-4c36-9127-c423b259643d" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1008.893864] env[62627]: DEBUG oslo_concurrency.lockutils [req-31e9f9bc-17d0-457b-b36a-916c42ca2da2 req-cc80b52e-94a8-4e0b-aa1b-0296dcea62be service nova] Acquired lock "refresh_cache-07b08283-cbe5-4c36-9127-c423b259643d" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1008.894030] env[62627]: DEBUG nova.network.neutron [req-31e9f9bc-17d0-457b-b36a-916c42ca2da2 req-cc80b52e-94a8-4e0b-aa1b-0296dcea62be service nova] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Refreshing network info cache for port 21eb745b-bf3f-4539-b50e-df8c6ff07be1 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1008.905423] env[62627]: DEBUG oslo_concurrency.lockutils [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquiring lock "refresh_cache-07b08283-cbe5-4c36-9127-c423b259643d" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1008.907795] env[62627]: DEBUG nova.policy [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8d6a2c615f034c49836031736531061c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '01b0f3fe63f64e1183acc0fec83bb3f5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 1008.909409] env[62627]: INFO nova.compute.manager [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] [instance: 9adb2baf-315f-4122-a657-cb99757002ba] Took 42.03 seconds to build instance. 
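Editor's note: the Acquiring/Acquired/Releasing lines for lock "refresh_cache-07b08283-..." above show how the per-instance network-info cache is serialised: whoever rebuilds the cache from Neutron does so under a named lock, and other threads wanting the same cache block on it. Nova uses oslo.concurrency's lockutils for this; the sketch below is a simplified, standard-library stand-in for that named-lock pattern, under the assumption of a single-process lock registry.

```python
# Simplified stand-in for the 'Acquiring lock "refresh_cache-<uuid>"' /
# 'Acquired lock' / 'Releasing lock' pattern above (nova uses
# oslo_concurrency.lockutils; this sketch keeps to the standard library).
import threading
from collections import defaultdict
from contextlib import contextmanager

_locks = defaultdict(threading.Lock)

@contextmanager
def named_lock(name):
    print(f'Acquiring lock "{name}"')
    with _locks[name]:
        print(f'Acquired lock "{name}"')
        try:
            yield
        finally:
            print(f'Releasing lock "{name}"')

def refresh_network_info_cache(instance_uuid, fetch_from_neutron, cache):
    """Rebuild the cached network_info for one instance under its lock."""
    with named_lock(f"refresh_cache-{instance_uuid}"):
        cache[instance_uuid] = fetch_from_neutron(instance_uuid)
```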
[ 1008.924598] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: f1b70379-e13d-4007-ac2d-3d2d43cd5602] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1009.159344] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b87e4c08-4627-43ec-b4ab-4ff852dc06e6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.176147] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93e014d6-94f7-48c1-b10f-52fdece1134b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.220335] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f2eda08-3d74-4d70-93e9-204fe55b389b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.230233] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0278e283-23bd-4eb2-a890-f967d10a02c5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.246715] env[62627]: DEBUG nova.compute.provider_tree [None req-55fd1ae7-7d25-4b8a-be92-dc0f872d015e tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1009.250794] env[62627]: DEBUG nova.compute.manager [req-1cfb8192-0963-462c-9e57-7e96acf42109 req-6f9ee52d-f8e3-4630-a7ab-dbfe51dbb875 service nova] [instance: c1fd6afb-9332-446d-a441-5942b60edc94] Received event network-vif-deleted-fee6b373-fb16-4c75-8b2d-d4df6e7e1952 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1009.250980] env[62627]: INFO nova.compute.manager [req-1cfb8192-0963-462c-9e57-7e96acf42109 req-6f9ee52d-f8e3-4630-a7ab-dbfe51dbb875 service nova] [instance: c1fd6afb-9332-446d-a441-5942b60edc94] Neutron deleted interface fee6b373-fb16-4c75-8b2d-d4df6e7e1952; detaching it from the instance and deleting it from the info cache [ 1009.251167] env[62627]: DEBUG nova.network.neutron [req-1cfb8192-0963-462c-9e57-7e96acf42109 req-6f9ee52d-f8e3-4630-a7ab-dbfe51dbb875 service nova] [instance: c1fd6afb-9332-446d-a441-5942b60edc94] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1009.320750] env[62627]: DEBUG nova.network.neutron [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 7184a7d2-72c6-48f2-b125-880f1f9858c7] Successfully created port: 28c58084-5681-433c-a2ce-043547a7f315 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1009.336115] env[62627]: DEBUG nova.compute.manager [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 7184a7d2-72c6-48f2-b125-880f1f9858c7] Start building block device mappings for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1009.414273] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8c61faa2-19c8-40f2-94d8-0f5e5fdb5841 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Lock "9adb2baf-315f-4122-a657-cb99757002ba" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 43.537s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1009.425599] env[62627]: DEBUG nova.network.neutron [req-31e9f9bc-17d0-457b-b36a-916c42ca2da2 req-cc80b52e-94a8-4e0b-aa1b-0296dcea62be service nova] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1009.431113] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 02216be1-cdb9-421a-8a23-4a919b376595] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1009.504062] env[62627]: DEBUG nova.network.neutron [req-31e9f9bc-17d0-457b-b36a-916c42ca2da2 req-cc80b52e-94a8-4e0b-aa1b-0296dcea62be service nova] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1009.630153] env[62627]: DEBUG oslo_concurrency.lockutils [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Acquiring lock "bd583f6b-be8d-4f02-a67e-0ea10e6dd087" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1009.630153] env[62627]: DEBUG oslo_concurrency.lockutils [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Lock "bd583f6b-be8d-4f02-a67e-0ea10e6dd087" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1009.720627] env[62627]: DEBUG nova.network.neutron [-] [instance: c1fd6afb-9332-446d-a441-5942b60edc94] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1009.750321] env[62627]: DEBUG nova.scheduler.client.report [None req-55fd1ae7-7d25-4b8a-be92-dc0f872d015e tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1009.755435] 
env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6adfbbbe-bfba-4e94-9a93-9b2c4eccd28d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.766956] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1513be76-2426-48d3-a68d-55cba559693c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.806229] env[62627]: DEBUG nova.compute.manager [req-1cfb8192-0963-462c-9e57-7e96acf42109 req-6f9ee52d-f8e3-4630-a7ab-dbfe51dbb875 service nova] [instance: c1fd6afb-9332-446d-a441-5942b60edc94] Detach interface failed, port_id=fee6b373-fb16-4c75-8b2d-d4df6e7e1952, reason: Instance c1fd6afb-9332-446d-a441-5942b60edc94 could not be found. {{(pid=62627) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1009.933967] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 574293e3-8ec8-410f-8a30-83bc60d25d64] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1010.006508] env[62627]: DEBUG oslo_concurrency.lockutils [req-31e9f9bc-17d0-457b-b36a-916c42ca2da2 req-cc80b52e-94a8-4e0b-aa1b-0296dcea62be service nova] Releasing lock "refresh_cache-07b08283-cbe5-4c36-9127-c423b259643d" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1010.006871] env[62627]: DEBUG oslo_concurrency.lockutils [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquired lock "refresh_cache-07b08283-cbe5-4c36-9127-c423b259643d" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1010.007043] env[62627]: DEBUG nova.network.neutron [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1010.133615] env[62627]: DEBUG nova.compute.manager [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: bd583f6b-be8d-4f02-a67e-0ea10e6dd087] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1010.227623] env[62627]: INFO nova.compute.manager [-] [instance: c1fd6afb-9332-446d-a441-5942b60edc94] Took 1.38 seconds to deallocate network for instance. 
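Editor's note: the "Inventory has not changed for provider c17e7f32-..." entry above logs the resource-provider inventory the resource tracker reports to Placement. Usable capacity per resource class follows Placement's standard arithmetic, (total - reserved) * allocation_ratio, so this provider advertises 192 VCPU, 196078 MB of RAM and 400 GB of disk. A small check of that arithmetic using the values from the log:

```python
# Capacity arithmetic for the inventory logged above (Placement semantics:
# usable capacity = (total - reserved) * allocation_ratio, allocated in
# multiples of step_size between min_unit and max_unit).
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: {capacity:g}")
# VCPU: 192
# MEMORY_MB: 196078
# DISK_GB: 400
```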
[ 1010.234260] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9e5840cb-5be2-4b36-8e58-0c5896dc6dc5 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Acquiring lock "9adb2baf-315f-4122-a657-cb99757002ba" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1010.234260] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9e5840cb-5be2-4b36-8e58-0c5896dc6dc5 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Lock "9adb2baf-315f-4122-a657-cb99757002ba" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1010.234418] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9e5840cb-5be2-4b36-8e58-0c5896dc6dc5 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Acquiring lock "9adb2baf-315f-4122-a657-cb99757002ba-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1010.234456] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9e5840cb-5be2-4b36-8e58-0c5896dc6dc5 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Lock "9adb2baf-315f-4122-a657-cb99757002ba-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1010.234619] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9e5840cb-5be2-4b36-8e58-0c5896dc6dc5 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Lock "9adb2baf-315f-4122-a657-cb99757002ba-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1010.236197] env[62627]: INFO nova.compute.manager [None req-9e5840cb-5be2-4b36-8e58-0c5896dc6dc5 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] [instance: 9adb2baf-315f-4122-a657-cb99757002ba] Terminating instance [ 1010.255164] env[62627]: DEBUG oslo_concurrency.lockutils [None req-55fd1ae7-7d25-4b8a-be92-dc0f872d015e tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.931s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1010.257550] env[62627]: DEBUG oslo_concurrency.lockutils [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.856s {{(pid=62627) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1010.259105] env[62627]: INFO nova.compute.claims [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] [instance: 58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1010.277489] env[62627]: INFO nova.scheduler.client.report [None req-55fd1ae7-7d25-4b8a-be92-dc0f872d015e tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Deleted allocations for instance a397bce6-30c7-485d-81ab-88a8e021996c [ 1010.345194] env[62627]: DEBUG nova.compute.manager [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 7184a7d2-72c6-48f2-b125-880f1f9858c7] Start spawning the instance on the hypervisor. {{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1010.373128] env[62627]: DEBUG nova.virt.hardware [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='b8a4838c1560aaceb690ef74d8f835ae',container_format='bare',created_at=2025-01-30T20:35:10Z,direct_url=,disk_format='vmdk',id=aeba0208-712f-4f27-adb0-2e5cef3d30a3,min_disk=1,min_ram=0,name='tempest-test-snap-817471544',owner='01b0f3fe63f64e1183acc0fec83bb3f5',properties=ImageMetaProps,protected=,size=21334016,status='active',tags=,updated_at=2025-01-30T20:35:24Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1010.373419] env[62627]: DEBUG nova.virt.hardware [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1010.373601] env[62627]: DEBUG nova.virt.hardware [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1010.373864] env[62627]: DEBUG nova.virt.hardware [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1010.374025] env[62627]: DEBUG nova.virt.hardware [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1010.374180] env[62627]: DEBUG nova.virt.hardware [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1010.374384] env[62627]: DEBUG nova.virt.hardware [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1010.374544] env[62627]: DEBUG nova.virt.hardware [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1010.374708] env[62627]: DEBUG nova.virt.hardware [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1010.374884] env[62627]: DEBUG nova.virt.hardware [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1010.375090] env[62627]: DEBUG nova.virt.hardware [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1010.375967] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7016d4d5-d4d0-459a-a01a-626a38cb6282 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.385507] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-432e9911-d80a-4c88-836f-d672aae12592 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.420163] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ae6b1cae-0174-4f2d-9eca-bbb146ac4472 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Acquiring lock "ab999825-4310-4fd8-81f0-6bb5dde4175b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1010.420301] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ae6b1cae-0174-4f2d-9eca-bbb146ac4472 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Lock "ab999825-4310-4fd8-81f0-6bb5dde4175b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1010.420503] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ae6b1cae-0174-4f2d-9eca-bbb146ac4472 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Acquiring lock "ab999825-4310-4fd8-81f0-6bb5dde4175b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" 
{{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1010.420686] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ae6b1cae-0174-4f2d-9eca-bbb146ac4472 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Lock "ab999825-4310-4fd8-81f0-6bb5dde4175b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1010.420852] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ae6b1cae-0174-4f2d-9eca-bbb146ac4472 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Lock "ab999825-4310-4fd8-81f0-6bb5dde4175b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1010.422826] env[62627]: INFO nova.compute.manager [None req-ae6b1cae-0174-4f2d-9eca-bbb146ac4472 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Terminating instance [ 1010.437055] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 4c88fbb7-6e54-4ae6-a6df-e77e4d0202d8] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1010.537839] env[62627]: DEBUG nova.network.neutron [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Instance cache missing network info. 
{{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1010.652343] env[62627]: DEBUG oslo_concurrency.lockutils [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1010.661061] env[62627]: DEBUG nova.network.neutron [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Updating instance_info_cache with network_info: [{"id": "21eb745b-bf3f-4539-b50e-df8c6ff07be1", "address": "fa:16:3e:d0:49:53", "network": {"id": "7c6d5a35-da58-4a44-83d0-20feb994e5ab", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-709440062-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c9263b20f714274a55b88a8bdbd5251", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "99be9a5e-b3f9-4e6c-83d5-df11f817847d", "external-id": "nsx-vlan-transportzone-566", "segmentation_id": 566, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap21eb745b-bf", "ovs_interfaceid": "21eb745b-bf3f-4539-b50e-df8c6ff07be1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1010.732706] env[62627]: DEBUG oslo_concurrency.lockutils [None req-20c04bbf-d22d-4883-bb4d-6e14ab399e3e tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1010.739806] env[62627]: DEBUG nova.compute.manager [None req-9e5840cb-5be2-4b36-8e58-0c5896dc6dc5 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] [instance: 9adb2baf-315f-4122-a657-cb99757002ba] Start destroying the instance on the hypervisor. 
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1010.740029] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-9e5840cb-5be2-4b36-8e58-0c5896dc6dc5 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] [instance: 9adb2baf-315f-4122-a657-cb99757002ba] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1010.740892] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d033d828-e5af-479f-831f-12ce4f926c1d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.749503] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e5840cb-5be2-4b36-8e58-0c5896dc6dc5 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] [instance: 9adb2baf-315f-4122-a657-cb99757002ba] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1010.749748] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0cbfbdd8-415f-4bde-8879-7105b47733ab {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.757533] env[62627]: DEBUG oslo_vmware.api [None req-9e5840cb-5be2-4b36-8e58-0c5896dc6dc5 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Waiting for the task: (returnval){ [ 1010.757533] env[62627]: value = "task-2194734" [ 1010.757533] env[62627]: _type = "Task" [ 1010.757533] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.767520] env[62627]: DEBUG oslo_vmware.api [None req-9e5840cb-5be2-4b36-8e58-0c5896dc6dc5 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Task: {'id': task-2194734, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.786595] env[62627]: DEBUG oslo_concurrency.lockutils [None req-55fd1ae7-7d25-4b8a-be92-dc0f872d015e tempest-VolumesAdminNegativeTest-1197893955 tempest-VolumesAdminNegativeTest-1197893955-project-member] Lock "a397bce6-30c7-485d-81ab-88a8e021996c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.327s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1010.928232] env[62627]: DEBUG nova.compute.manager [None req-ae6b1cae-0174-4f2d-9eca-bbb146ac4472 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Start destroying the instance on the hypervisor. 
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1010.928507] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ae6b1cae-0174-4f2d-9eca-bbb146ac4472 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1010.929451] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42c5eb51-454d-43f6-bf63-34ac499489ec {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.943755] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 5f0240da-bec1-4bf3-a975-6ed3d0630a2d] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1010.952805] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae6b1cae-0174-4f2d-9eca-bbb146ac4472 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1010.954237] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-59d4ad5b-c9fa-4608-9dc4-8ddae0b5018c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.964725] env[62627]: DEBUG oslo_vmware.api [None req-ae6b1cae-0174-4f2d-9eca-bbb146ac4472 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Waiting for the task: (returnval){ [ 1010.964725] env[62627]: value = "task-2194735" [ 1010.964725] env[62627]: _type = "Task" [ 1010.964725] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.974247] env[62627]: DEBUG oslo_vmware.api [None req-ae6b1cae-0174-4f2d-9eca-bbb146ac4472 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2194735, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.979465] env[62627]: DEBUG nova.compute.manager [req-7d3a751e-b9e3-47e1-a79d-e545c34539f3 req-94f73589-ede7-4293-bbfc-65f7ef50fb29 service nova] [instance: 7184a7d2-72c6-48f2-b125-880f1f9858c7] Received event network-vif-plugged-28c58084-5681-433c-a2ce-043547a7f315 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1010.979809] env[62627]: DEBUG oslo_concurrency.lockutils [req-7d3a751e-b9e3-47e1-a79d-e545c34539f3 req-94f73589-ede7-4293-bbfc-65f7ef50fb29 service nova] Acquiring lock "7184a7d2-72c6-48f2-b125-880f1f9858c7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1010.980023] env[62627]: DEBUG oslo_concurrency.lockutils [req-7d3a751e-b9e3-47e1-a79d-e545c34539f3 req-94f73589-ede7-4293-bbfc-65f7ef50fb29 service nova] Lock "7184a7d2-72c6-48f2-b125-880f1f9858c7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1010.980260] env[62627]: DEBUG oslo_concurrency.lockutils [req-7d3a751e-b9e3-47e1-a79d-e545c34539f3 req-94f73589-ede7-4293-bbfc-65f7ef50fb29 service nova] Lock "7184a7d2-72c6-48f2-b125-880f1f9858c7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1010.980810] env[62627]: DEBUG nova.compute.manager [req-7d3a751e-b9e3-47e1-a79d-e545c34539f3 req-94f73589-ede7-4293-bbfc-65f7ef50fb29 service nova] [instance: 7184a7d2-72c6-48f2-b125-880f1f9858c7] No waiting events found dispatching network-vif-plugged-28c58084-5681-433c-a2ce-043547a7f315 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1010.981156] env[62627]: WARNING nova.compute.manager [req-7d3a751e-b9e3-47e1-a79d-e545c34539f3 req-94f73589-ede7-4293-bbfc-65f7ef50fb29 service nova] [instance: 7184a7d2-72c6-48f2-b125-880f1f9858c7] Received unexpected event network-vif-plugged-28c58084-5681-433c-a2ce-043547a7f315 for instance with vm_state building and task_state spawning. 
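Editor's note: the WARNING above ("Received unexpected event network-vif-plugged-... for instance with vm_state building and task_state spawning") is the benign race where Neutron's vif-plugged notification reaches the compute service before the spawn path has registered a waiter for it: the event is popped under the per-instance "<uuid>-events" lock, and when no waiter is found it is logged as unexpected and dropped. The sketch below models that pop-or-warn dispatch with a simple Future registry; it is an illustrative stand-in, not nova.compute.manager.InstanceEvents itself.

```python
# Simplified pop-or-warn dispatch behind the "No waiting events found
# dispatching ..." / "Received unexpected event ..." lines above.  Waiters
# register a Future keyed by (instance_uuid, event_name); an incoming external
# event either completes the matching Future or is reported as unexpected.
import threading
from concurrent.futures import Future

class InstanceEvents:
    def __init__(self):
        self._lock = threading.Lock()     # stands in for the "<uuid>-events" lock
        self._waiters = {}                # (uuid, event) -> Future

    def prepare_for_event(self, uuid, event):
        fut = Future()
        with self._lock:
            self._waiters[(uuid, event)] = fut
        return fut

    def pop_instance_event(self, uuid, event):
        with self._lock:
            return self._waiters.pop((uuid, event), None)

    def external_event(self, uuid, event):
        fut = self.pop_instance_event(uuid, event)
        if fut is None:
            print(f"WARNING: Received unexpected event {event} for instance {uuid}")
        else:
            fut.set_result(event)
```

In the normal (non-racy) case the spawn path registers the waiter first, and the vif-plugged wait returns as soon as external_event completes the Future.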
[ 1011.164120] env[62627]: DEBUG oslo_concurrency.lockutils [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Releasing lock "refresh_cache-07b08283-cbe5-4c36-9127-c423b259643d" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1011.164580] env[62627]: DEBUG nova.compute.manager [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Instance network_info: |[{"id": "21eb745b-bf3f-4539-b50e-df8c6ff07be1", "address": "fa:16:3e:d0:49:53", "network": {"id": "7c6d5a35-da58-4a44-83d0-20feb994e5ab", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-709440062-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c9263b20f714274a55b88a8bdbd5251", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "99be9a5e-b3f9-4e6c-83d5-df11f817847d", "external-id": "nsx-vlan-transportzone-566", "segmentation_id": 566, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap21eb745b-bf", "ovs_interfaceid": "21eb745b-bf3f-4539-b50e-df8c6ff07be1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1011.165151] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d0:49:53', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '99be9a5e-b3f9-4e6c-83d5-df11f817847d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '21eb745b-bf3f-4539-b50e-df8c6ff07be1', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1011.173925] env[62627]: DEBUG oslo.service.loopingcall [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1011.174253] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1011.174528] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-21ef9931-c90c-406a-be69-557c1d697029 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.194923] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1011.194923] env[62627]: value = "task-2194736" [ 1011.194923] env[62627]: _type = "Task" [ 1011.194923] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.203748] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194736, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.234246] env[62627]: DEBUG nova.network.neutron [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 7184a7d2-72c6-48f2-b125-880f1f9858c7] Successfully updated port: 28c58084-5681-433c-a2ce-043547a7f315 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1011.272700] env[62627]: DEBUG oslo_vmware.api [None req-9e5840cb-5be2-4b36-8e58-0c5896dc6dc5 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Task: {'id': task-2194734, 'name': PowerOffVM_Task, 'duration_secs': 0.206204} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.273253] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e5840cb-5be2-4b36-8e58-0c5896dc6dc5 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] [instance: 9adb2baf-315f-4122-a657-cb99757002ba] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1011.273427] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-9e5840cb-5be2-4b36-8e58-0c5896dc6dc5 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] [instance: 9adb2baf-315f-4122-a657-cb99757002ba] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1011.273686] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6c79695e-758a-4b3c-a772-72cafb2030b5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.342958] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-9e5840cb-5be2-4b36-8e58-0c5896dc6dc5 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] [instance: 9adb2baf-315f-4122-a657-cb99757002ba] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1011.342958] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-9e5840cb-5be2-4b36-8e58-0c5896dc6dc5 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] [instance: 9adb2baf-315f-4122-a657-cb99757002ba] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1011.342958] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e5840cb-5be2-4b36-8e58-0c5896dc6dc5 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Deleting the datastore file [datastore2] 9adb2baf-315f-4122-a657-cb99757002ba {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1011.342958] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dea5e6d1-bf9a-453d-8a49-144177d4db53 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.350180] env[62627]: DEBUG oslo_vmware.api [None req-9e5840cb-5be2-4b36-8e58-0c5896dc6dc5 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Waiting for the task: (returnval){ [ 1011.350180] env[62627]: value = "task-2194738" [ 1011.350180] env[62627]: _type = "Task" [ 1011.350180] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.362653] env[62627]: DEBUG oslo_vmware.api [None req-9e5840cb-5be2-4b36-8e58-0c5896dc6dc5 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Task: {'id': task-2194738, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.454497] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: d28285ed-2ec0-43a4-bd34-82d33816fea4] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1011.477023] env[62627]: DEBUG oslo_vmware.api [None req-ae6b1cae-0174-4f2d-9eca-bbb146ac4472 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2194735, 'name': PowerOffVM_Task, 'duration_secs': 0.224211} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.477450] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae6b1cae-0174-4f2d-9eca-bbb146ac4472 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1011.477730] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ae6b1cae-0174-4f2d-9eca-bbb146ac4472 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1011.478071] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6430c362-3c45-49c0-aaed-89b2c3600203 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.555207] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ae6b1cae-0174-4f2d-9eca-bbb146ac4472 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1011.556412] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ae6b1cae-0174-4f2d-9eca-bbb146ac4472 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1011.556412] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae6b1cae-0174-4f2d-9eca-bbb146ac4472 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Deleting the datastore file [datastore2] ab999825-4310-4fd8-81f0-6bb5dde4175b {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1011.559015] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ffc7acbe-b1d6-4d60-bd57-c99b06ba2ae2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.567536] env[62627]: DEBUG oslo_vmware.api [None req-ae6b1cae-0174-4f2d-9eca-bbb146ac4472 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Waiting for the task: (returnval){ [ 1011.567536] env[62627]: value = "task-2194740" [ 1011.567536] env[62627]: _type = "Task" [ 1011.567536] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.575216] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f30fe716-5c85-4a43-92d2-07dbdf5b5ce5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.582092] env[62627]: DEBUG oslo_vmware.api [None req-ae6b1cae-0174-4f2d-9eca-bbb146ac4472 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2194740, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.586145] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e872e4f-1632-496f-b88e-67460a3fb912 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.625961] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c71c3dcd-992f-4bbb-9a8f-4cba3918bbb6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.634791] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c546087d-1b94-4945-8875-01708ce0b0ac {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.652089] env[62627]: DEBUG nova.compute.provider_tree [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1011.704938] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194736, 'name': CreateVM_Task, 'duration_secs': 0.368474} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.705167] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1011.705813] env[62627]: DEBUG oslo_concurrency.lockutils [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1011.705965] env[62627]: DEBUG oslo_concurrency.lockutils [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1011.706347] env[62627]: DEBUG oslo_concurrency.lockutils [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1011.706609] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1631ae20-2c1e-4087-9ac1-4b8a9b87935f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.712263] env[62627]: DEBUG oslo_vmware.api [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 1011.712263] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52db2c05-cc67-a86a-69bf-7eeabd2126e6" [ 1011.712263] env[62627]: _type = "Task" [ 1011.712263] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.724256] env[62627]: DEBUG oslo_vmware.api [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52db2c05-cc67-a86a-69bf-7eeabd2126e6, 'name': SearchDatastore_Task, 'duration_secs': 0.009234} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.725023] env[62627]: DEBUG oslo_concurrency.lockutils [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1011.725369] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1011.725613] env[62627]: DEBUG oslo_concurrency.lockutils [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1011.725797] env[62627]: DEBUG oslo_concurrency.lockutils [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1011.725983] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1011.726249] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3b9b1b42-4620-4f70-9a43-0a104e48506b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.734673] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1011.734857] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1011.735598] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9853cf1b-8630-4360-89ba-4a07d79f3a47 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.738077] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Acquiring lock "refresh_cache-7184a7d2-72c6-48f2-b125-880f1f9858c7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1011.738210] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Acquired lock "refresh_cache-7184a7d2-72c6-48f2-b125-880f1f9858c7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1011.738351] env[62627]: DEBUG nova.network.neutron [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 7184a7d2-72c6-48f2-b125-880f1f9858c7] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1011.742446] env[62627]: DEBUG oslo_vmware.api [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 1011.742446] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52b3fa79-fc51-5b16-7d5e-30d3a9764d95" [ 1011.742446] env[62627]: _type = "Task" [ 1011.742446] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.750503] env[62627]: DEBUG oslo_vmware.api [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52b3fa79-fc51-5b16-7d5e-30d3a9764d95, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.864140] env[62627]: DEBUG oslo_vmware.api [None req-9e5840cb-5be2-4b36-8e58-0c5896dc6dc5 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Task: {'id': task-2194738, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.163044} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.864427] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e5840cb-5be2-4b36-8e58-0c5896dc6dc5 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1011.864637] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-9e5840cb-5be2-4b36-8e58-0c5896dc6dc5 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] [instance: 9adb2baf-315f-4122-a657-cb99757002ba] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1011.864818] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-9e5840cb-5be2-4b36-8e58-0c5896dc6dc5 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] [instance: 9adb2baf-315f-4122-a657-cb99757002ba] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1011.864988] env[62627]: INFO nova.compute.manager [None req-9e5840cb-5be2-4b36-8e58-0c5896dc6dc5 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] [instance: 9adb2baf-315f-4122-a657-cb99757002ba] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1011.865311] env[62627]: DEBUG oslo.service.loopingcall [None req-9e5840cb-5be2-4b36-8e58-0c5896dc6dc5 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1011.865522] env[62627]: DEBUG nova.compute.manager [-] [instance: 9adb2baf-315f-4122-a657-cb99757002ba] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1011.865638] env[62627]: DEBUG nova.network.neutron [-] [instance: 9adb2baf-315f-4122-a657-cb99757002ba] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1011.958806] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 32a3b8bb-7c3a-4e30-8a50-37ce53e71882] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1012.079995] env[62627]: DEBUG oslo_vmware.api [None req-ae6b1cae-0174-4f2d-9eca-bbb146ac4472 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Task: {'id': task-2194740, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.135284} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.080276] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae6b1cae-0174-4f2d-9eca-bbb146ac4472 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1012.080466] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ae6b1cae-0174-4f2d-9eca-bbb146ac4472 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1012.080682] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ae6b1cae-0174-4f2d-9eca-bbb146ac4472 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1012.080903] env[62627]: INFO nova.compute.manager [None req-ae6b1cae-0174-4f2d-9eca-bbb146ac4472 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1012.081167] env[62627]: DEBUG oslo.service.loopingcall [None req-ae6b1cae-0174-4f2d-9eca-bbb146ac4472 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1012.081361] env[62627]: DEBUG nova.compute.manager [-] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1012.081455] env[62627]: DEBUG nova.network.neutron [-] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1012.155640] env[62627]: DEBUG nova.scheduler.client.report [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1012.254076] env[62627]: DEBUG oslo_vmware.api [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52b3fa79-fc51-5b16-7d5e-30d3a9764d95, 'name': SearchDatastore_Task, 'duration_secs': 0.008558} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.254873] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3a179258-7fd2-4afa-8c43-315d7f1246f1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.260828] env[62627]: DEBUG oslo_vmware.api [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 1012.260828] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52793048-69d8-4c3f-cc95-976a9e4c06f2" [ 1012.260828] env[62627]: _type = "Task" [ 1012.260828] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.269291] env[62627]: DEBUG oslo_vmware.api [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52793048-69d8-4c3f-cc95-976a9e4c06f2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.296500] env[62627]: DEBUG nova.network.neutron [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 7184a7d2-72c6-48f2-b125-880f1f9858c7] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1012.462816] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: aed62160-2db5-4008-a491-2b27ae81ef5f] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1012.658509] env[62627]: DEBUG nova.compute.manager [req-e347791b-39ab-42c7-bd26-2f101245acfa req-6164c89c-afc6-4d87-b6b2-ff96e6df45e1 service nova] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Received event network-vif-deleted-a7d230b4-8ae5-4c90-8ccf-806a4056e883 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1012.659091] env[62627]: INFO nova.compute.manager [req-e347791b-39ab-42c7-bd26-2f101245acfa req-6164c89c-afc6-4d87-b6b2-ff96e6df45e1 service nova] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Neutron deleted interface a7d230b4-8ae5-4c90-8ccf-806a4056e883; detaching it from the instance and deleting it from the info cache [ 1012.659416] env[62627]: DEBUG nova.network.neutron [req-e347791b-39ab-42c7-bd26-2f101245acfa req-6164c89c-afc6-4d87-b6b2-ff96e6df45e1 service nova] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1012.664930] env[62627]: DEBUG oslo_concurrency.lockutils [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.407s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1012.664930] env[62627]: DEBUG nova.compute.manager [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 
tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] [instance: 58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1012.667660] env[62627]: DEBUG oslo_concurrency.lockutils [None req-eb3833de-d679-49df-8c4f-61fa547f46fd tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.129s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1012.667824] env[62627]: DEBUG nova.objects.instance [None req-eb3833de-d679-49df-8c4f-61fa547f46fd tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Lazy-loading 'resources' on Instance uuid 493e7ea0-b160-4cd0-ae0b-bc46e4d89217 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1012.687225] env[62627]: DEBUG nova.network.neutron [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 7184a7d2-72c6-48f2-b125-880f1f9858c7] Updating instance_info_cache with network_info: [{"id": "28c58084-5681-433c-a2ce-043547a7f315", "address": "fa:16:3e:1a:ed:0e", "network": {"id": "cc0a3a8a-1222-4362-a164-11bcef0a8bc1", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1517256335-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "01b0f3fe63f64e1183acc0fec83bb3f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52f465cb-7418-4172-bd7d-aec00abeb692", "external-id": "nsx-vlan-transportzone-895", "segmentation_id": 895, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap28c58084-56", "ovs_interfaceid": "28c58084-5681-433c-a2ce-043547a7f315", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1012.772331] env[62627]: DEBUG oslo_vmware.api [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52793048-69d8-4c3f-cc95-976a9e4c06f2, 'name': SearchDatastore_Task, 'duration_secs': 0.010301} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.772664] env[62627]: DEBUG oslo_concurrency.lockutils [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1012.773054] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 07b08283-cbe5-4c36-9127-c423b259643d/07b08283-cbe5-4c36-9127-c423b259643d.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1012.773368] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d7656525-a1f8-4c4f-8920-f0eb9e5b6444 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.782150] env[62627]: DEBUG oslo_vmware.api [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 1012.782150] env[62627]: value = "task-2194741" [ 1012.782150] env[62627]: _type = "Task" [ 1012.782150] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.792817] env[62627]: DEBUG oslo_vmware.api [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194741, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.968661] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: cfb3a7a8-ed2b-44b2-aa9c-15e1b1e7f88a] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1012.970943] env[62627]: DEBUG nova.network.neutron [-] [instance: 9adb2baf-315f-4122-a657-cb99757002ba] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1013.017651] env[62627]: DEBUG nova.compute.manager [req-649e16d4-fe67-45bc-8d41-c6067d4f83e9 req-61d26be9-215c-4c93-ae9a-7c4a67fc103d service nova] [instance: 7184a7d2-72c6-48f2-b125-880f1f9858c7] Received event network-changed-28c58084-5681-433c-a2ce-043547a7f315 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1013.017877] env[62627]: DEBUG nova.compute.manager [req-649e16d4-fe67-45bc-8d41-c6067d4f83e9 req-61d26be9-215c-4c93-ae9a-7c4a67fc103d service nova] [instance: 7184a7d2-72c6-48f2-b125-880f1f9858c7] Refreshing instance network info cache due to event network-changed-28c58084-5681-433c-a2ce-043547a7f315. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1013.018089] env[62627]: DEBUG oslo_concurrency.lockutils [req-649e16d4-fe67-45bc-8d41-c6067d4f83e9 req-61d26be9-215c-4c93-ae9a-7c4a67fc103d service nova] Acquiring lock "refresh_cache-7184a7d2-72c6-48f2-b125-880f1f9858c7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1013.082902] env[62627]: DEBUG nova.network.neutron [-] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1013.162870] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-203cb2d1-d0af-4459-9e23-b21a5dfd1f87 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.170890] env[62627]: DEBUG nova.compute.utils [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1013.176436] env[62627]: DEBUG nova.compute.manager [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] [instance: 58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37] Allocating IP information in the background. {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1013.176630] env[62627]: DEBUG nova.network.neutron [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] [instance: 58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1013.181554] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8029055-3736-4c98-8fac-9e7b0dbf836d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.193698] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Releasing lock "refresh_cache-7184a7d2-72c6-48f2-b125-880f1f9858c7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1013.194017] env[62627]: DEBUG nova.compute.manager [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 7184a7d2-72c6-48f2-b125-880f1f9858c7] Instance network_info: |[{"id": "28c58084-5681-433c-a2ce-043547a7f315", "address": "fa:16:3e:1a:ed:0e", "network": {"id": "cc0a3a8a-1222-4362-a164-11bcef0a8bc1", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1517256335-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "01b0f3fe63f64e1183acc0fec83bb3f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, 
"type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52f465cb-7418-4172-bd7d-aec00abeb692", "external-id": "nsx-vlan-transportzone-895", "segmentation_id": 895, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap28c58084-56", "ovs_interfaceid": "28c58084-5681-433c-a2ce-043547a7f315", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1013.194387] env[62627]: DEBUG oslo_concurrency.lockutils [req-649e16d4-fe67-45bc-8d41-c6067d4f83e9 req-61d26be9-215c-4c93-ae9a-7c4a67fc103d service nova] Acquired lock "refresh_cache-7184a7d2-72c6-48f2-b125-880f1f9858c7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1013.194593] env[62627]: DEBUG nova.network.neutron [req-649e16d4-fe67-45bc-8d41-c6067d4f83e9 req-61d26be9-215c-4c93-ae9a-7c4a67fc103d service nova] [instance: 7184a7d2-72c6-48f2-b125-880f1f9858c7] Refreshing network info cache for port 28c58084-5681-433c-a2ce-043547a7f315 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1013.196461] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 7184a7d2-72c6-48f2-b125-880f1f9858c7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1a:ed:0e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '52f465cb-7418-4172-bd7d-aec00abeb692', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '28c58084-5681-433c-a2ce-043547a7f315', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1013.205056] env[62627]: DEBUG oslo.service.loopingcall [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1013.211192] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7184a7d2-72c6-48f2-b125-880f1f9858c7] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1013.211974] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-01c98a5b-481e-45bb-9b68-114a38946ef8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.242436] env[62627]: DEBUG nova.compute.manager [req-e347791b-39ab-42c7-bd26-2f101245acfa req-6164c89c-afc6-4d87-b6b2-ff96e6df45e1 service nova] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Detach interface failed, port_id=a7d230b4-8ae5-4c90-8ccf-806a4056e883, reason: Instance ab999825-4310-4fd8-81f0-6bb5dde4175b could not be found. {{(pid=62627) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1013.249888] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1013.249888] env[62627]: value = "task-2194742" [ 1013.249888] env[62627]: _type = "Task" [ 1013.249888] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.261064] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194742, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.293611] env[62627]: DEBUG nova.policy [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4a9f3786a06d4fdcb0be2223367aec78', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd5dd3e3221f94bd38e8a9677fa91f98e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 1013.298971] env[62627]: DEBUG oslo_vmware.api [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194741, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.475771} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.301782] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 07b08283-cbe5-4c36-9127-c423b259643d/07b08283-cbe5-4c36-9127-c423b259643d.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1013.302014] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1013.302470] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-571f8258-9ebb-4750-b2fe-0d25d0dfa9f9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.310884] env[62627]: DEBUG oslo_vmware.api [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 1013.310884] env[62627]: value = "task-2194743" [ 1013.310884] env[62627]: _type = "Task" [ 1013.310884] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.326583] env[62627]: DEBUG oslo_vmware.api [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194743, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.473573] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: cbd30c37-f6c3-4229-8b95-855cab64924c] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1013.475438] env[62627]: INFO nova.compute.manager [-] [instance: 9adb2baf-315f-4122-a657-cb99757002ba] Took 1.61 seconds to deallocate network for instance. [ 1013.507851] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fc84707-94c0-4056-86e6-bb17493a4e43 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.516509] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebdaefea-6864-47c7-90d0-c05262beed1c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.550614] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c70ffaca-6e6c-4452-98db-5b4ec29a6736 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.559499] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c73ff7d-2312-430a-9ebc-7aee20ec7964 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.574060] env[62627]: DEBUG nova.compute.provider_tree [None req-eb3833de-d679-49df-8c4f-61fa547f46fd tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1013.584496] env[62627]: INFO nova.compute.manager [-] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Took 1.50 seconds to deallocate network for instance. [ 1013.677679] env[62627]: DEBUG nova.compute.manager [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] [instance: 58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1013.763336] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194742, 'name': CreateVM_Task, 'duration_secs': 0.500713} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.763336] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7184a7d2-72c6-48f2-b125-880f1f9858c7] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1013.763336] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aeba0208-712f-4f27-adb0-2e5cef3d30a3" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1013.763336] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aeba0208-712f-4f27-adb0-2e5cef3d30a3" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1013.763700] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/aeba0208-712f-4f27-adb0-2e5cef3d30a3" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1013.763956] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8cae8194-5e85-40d1-ae7c-b749a5c871f0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.768706] env[62627]: DEBUG oslo_vmware.api [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the task: (returnval){ [ 1013.768706] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52e7b1a6-256c-74f0-b1a4-225526166b4e" [ 1013.768706] env[62627]: _type = "Task" [ 1013.768706] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.777885] env[62627]: DEBUG oslo_vmware.api [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52e7b1a6-256c-74f0-b1a4-225526166b4e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.798670] env[62627]: DEBUG nova.network.neutron [req-649e16d4-fe67-45bc-8d41-c6067d4f83e9 req-61d26be9-215c-4c93-ae9a-7c4a67fc103d service nova] [instance: 7184a7d2-72c6-48f2-b125-880f1f9858c7] Updated VIF entry in instance network info cache for port 28c58084-5681-433c-a2ce-043547a7f315. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1013.798979] env[62627]: DEBUG nova.network.neutron [req-649e16d4-fe67-45bc-8d41-c6067d4f83e9 req-61d26be9-215c-4c93-ae9a-7c4a67fc103d service nova] [instance: 7184a7d2-72c6-48f2-b125-880f1f9858c7] Updating instance_info_cache with network_info: [{"id": "28c58084-5681-433c-a2ce-043547a7f315", "address": "fa:16:3e:1a:ed:0e", "network": {"id": "cc0a3a8a-1222-4362-a164-11bcef0a8bc1", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1517256335-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "01b0f3fe63f64e1183acc0fec83bb3f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52f465cb-7418-4172-bd7d-aec00abeb692", "external-id": "nsx-vlan-transportzone-895", "segmentation_id": 895, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap28c58084-56", "ovs_interfaceid": "28c58084-5681-433c-a2ce-043547a7f315", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1013.822712] env[62627]: DEBUG oslo_vmware.api [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194743, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071499} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.823447] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1013.824108] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fd0a56e-14b9-45b7-86f4-547fbfdc98b4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.852125] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Reconfiguring VM instance instance-00000044 to attach disk [datastore2] 07b08283-cbe5-4c36-9127-c423b259643d/07b08283-cbe5-4c36-9127-c423b259643d.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1013.852125] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-27ead22d-f183-4f71-ad72-ec4c72a139c8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.873097] env[62627]: DEBUG oslo_vmware.api [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 1013.873097] env[62627]: value = "task-2194744" [ 1013.873097] env[62627]: _type = "Task" [ 1013.873097] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.882013] env[62627]: DEBUG oslo_vmware.api [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194744, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.926660] env[62627]: DEBUG nova.network.neutron [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] [instance: 58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37] Successfully created port: 86cb7370-5805-448b-bd4b-6204a2aa2a76 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1013.978459] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 95ac4fbf-071a-403e-8cae-88d9899757ba] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1013.986380] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9e5840cb-5be2-4b36-8e58-0c5896dc6dc5 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1014.080020] env[62627]: DEBUG nova.scheduler.client.report [None req-eb3833de-d679-49df-8c4f-61fa547f46fd tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1014.093844] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ae6b1cae-0174-4f2d-9eca-bbb146ac4472 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1014.279524] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aeba0208-712f-4f27-adb0-2e5cef3d30a3" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1014.279783] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 7184a7d2-72c6-48f2-b125-880f1f9858c7] Processing image aeba0208-712f-4f27-adb0-2e5cef3d30a3 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1014.280023] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/aeba0208-712f-4f27-adb0-2e5cef3d30a3/aeba0208-712f-4f27-adb0-2e5cef3d30a3.vmdk" {{(pid=62627) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1014.280175] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Acquired lock "[datastore2] devstack-image-cache_base/aeba0208-712f-4f27-adb0-2e5cef3d30a3/aeba0208-712f-4f27-adb0-2e5cef3d30a3.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1014.280354] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1014.280601] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e0de65d4-b5d7-4cb6-a5b4-669763540a60 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.289263] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1014.289438] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1014.290114] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-63554ba8-ed89-4bc2-9d1e-0d3a9a8ea07a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.295469] env[62627]: DEBUG oslo_vmware.api [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the task: (returnval){ [ 1014.295469] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d2cb24-4f98-4ab8-10b1-f4c570e6ebe5" [ 1014.295469] env[62627]: _type = "Task" [ 1014.295469] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.303480] env[62627]: DEBUG oslo_concurrency.lockutils [req-649e16d4-fe67-45bc-8d41-c6067d4f83e9 req-61d26be9-215c-4c93-ae9a-7c4a67fc103d service nova] Releasing lock "refresh_cache-7184a7d2-72c6-48f2-b125-880f1f9858c7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1014.303712] env[62627]: DEBUG nova.compute.manager [req-649e16d4-fe67-45bc-8d41-c6067d4f83e9 req-61d26be9-215c-4c93-ae9a-7c4a67fc103d service nova] [instance: 9adb2baf-315f-4122-a657-cb99757002ba] Received event network-vif-deleted-85488276-f200-4480-ace3-01452fa9a93f {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1014.304065] env[62627]: DEBUG oslo_vmware.api [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d2cb24-4f98-4ab8-10b1-f4c570e6ebe5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.384159] env[62627]: DEBUG oslo_vmware.api [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194744, 'name': ReconfigVM_Task, 'duration_secs': 0.363099} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.384445] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Reconfigured VM instance instance-00000044 to attach disk [datastore2] 07b08283-cbe5-4c36-9127-c423b259643d/07b08283-cbe5-4c36-9127-c423b259643d.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1014.385107] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1e0323ed-5eac-4026-afb9-b18180c6e30a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.392838] env[62627]: DEBUG oslo_vmware.api [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 1014.392838] env[62627]: value = "task-2194745" [ 1014.392838] env[62627]: _type = "Task" [ 1014.392838] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.401222] env[62627]: DEBUG oslo_vmware.api [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194745, 'name': Rename_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.479997] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: ea289de9-9b2d-4756-a9a5-0e3dfe8db772] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1014.584013] env[62627]: DEBUG oslo_concurrency.lockutils [None req-eb3833de-d679-49df-8c4f-61fa547f46fd tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.916s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1014.586647] env[62627]: DEBUG oslo_concurrency.lockutils [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.934s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1014.588478] env[62627]: INFO nova.compute.claims [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: bd583f6b-be8d-4f02-a67e-0ea10e6dd087] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1014.608465] env[62627]: INFO nova.scheduler.client.report [None req-eb3833de-d679-49df-8c4f-61fa547f46fd tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Deleted allocations for instance 493e7ea0-b160-4cd0-ae0b-bc46e4d89217 [ 1014.690947] env[62627]: DEBUG nova.compute.manager [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] [instance: 58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37] Start spawning the instance on the hypervisor. 
{{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1014.723029] env[62627]: DEBUG nova.virt.hardware [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1014.723315] env[62627]: DEBUG nova.virt.hardware [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1014.723584] env[62627]: DEBUG nova.virt.hardware [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1014.723800] env[62627]: DEBUG nova.virt.hardware [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1014.724153] env[62627]: DEBUG nova.virt.hardware [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1014.724153] env[62627]: DEBUG nova.virt.hardware [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1014.724317] env[62627]: DEBUG nova.virt.hardware [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1014.724476] env[62627]: DEBUG nova.virt.hardware [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1014.724639] env[62627]: DEBUG 
nova.virt.hardware [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1014.724798] env[62627]: DEBUG nova.virt.hardware [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1014.724973] env[62627]: DEBUG nova.virt.hardware [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1014.726066] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e783afd3-0603-45a1-9f91-1d6d59b44051 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.735239] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a80e662-a9f9-4863-a0c3-08c3f0a50cab {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.806295] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 7184a7d2-72c6-48f2-b125-880f1f9858c7] Preparing fetch location {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1014.806542] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 7184a7d2-72c6-48f2-b125-880f1f9858c7] Fetch image to [datastore2] OSTACK_IMG_581d19cf-7963-4086-b30d-35756c0eb29b/OSTACK_IMG_581d19cf-7963-4086-b30d-35756c0eb29b.vmdk {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1014.806815] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 7184a7d2-72c6-48f2-b125-880f1f9858c7] Downloading stream optimized image aeba0208-712f-4f27-adb0-2e5cef3d30a3 to [datastore2] OSTACK_IMG_581d19cf-7963-4086-b30d-35756c0eb29b/OSTACK_IMG_581d19cf-7963-4086-b30d-35756c0eb29b.vmdk on the data store datastore2 as vApp {{(pid=62627) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1014.806935] env[62627]: DEBUG nova.virt.vmwareapi.images [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 7184a7d2-72c6-48f2-b125-880f1f9858c7] Downloading image file data aeba0208-712f-4f27-adb0-2e5cef3d30a3 to the ESX as VM named 'OSTACK_IMG_581d19cf-7963-4086-b30d-35756c0eb29b' {{(pid=62627) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1014.890604] env[62627]: DEBUG oslo_vmware.rw_handles [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] 
Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1014.890604] env[62627]: value = "resgroup-9" [ 1014.890604] env[62627]: _type = "ResourcePool" [ 1014.890604] env[62627]: }. {{(pid=62627) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1014.890604] env[62627]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-9d3d625b-a302-4ebe-8f21-96cea5e316dc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.915218] env[62627]: DEBUG oslo_vmware.api [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194745, 'name': Rename_Task, 'duration_secs': 0.147112} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.916725] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1014.917031] env[62627]: DEBUG oslo_vmware.rw_handles [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Lease: (returnval){ [ 1014.917031] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d905f6-bd83-e9ad-ec62-13ae5f13bbd2" [ 1014.917031] env[62627]: _type = "HttpNfcLease" [ 1014.917031] env[62627]: } obtained for vApp import into resource pool (val){ [ 1014.917031] env[62627]: value = "resgroup-9" [ 1014.917031] env[62627]: _type = "ResourcePool" [ 1014.917031] env[62627]: }. {{(pid=62627) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1014.917246] env[62627]: DEBUG oslo_vmware.api [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the lease: (returnval){ [ 1014.917246] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d905f6-bd83-e9ad-ec62-13ae5f13bbd2" [ 1014.917246] env[62627]: _type = "HttpNfcLease" [ 1014.917246] env[62627]: } to be ready. {{(pid=62627) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1014.917452] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8e1aced7-9134-4900-8c08-7a665b4e8734 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.928143] env[62627]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1014.928143] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d905f6-bd83-e9ad-ec62-13ae5f13bbd2" [ 1014.928143] env[62627]: _type = "HttpNfcLease" [ 1014.928143] env[62627]: } is initializing. 
{{(pid=62627) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1014.930239] env[62627]: DEBUG oslo_vmware.api [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 1014.930239] env[62627]: value = "task-2194747" [ 1014.930239] env[62627]: _type = "Task" [ 1014.930239] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.939982] env[62627]: DEBUG oslo_vmware.api [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194747, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.983829] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 8af5ca34-f9c4-4b68-b0f7-7163f388b778] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1015.116109] env[62627]: DEBUG oslo_concurrency.lockutils [None req-eb3833de-d679-49df-8c4f-61fa547f46fd tempest-ServersTestMultiNic-134641580 tempest-ServersTestMultiNic-134641580-project-member] Lock "493e7ea0-b160-4cd0-ae0b-bc46e4d89217" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.227s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1015.352462] env[62627]: DEBUG nova.compute.manager [req-6b151bce-2399-49e7-a7d2-55e139e2e8e5 req-2b49f1cb-b66e-4b49-8671-6a51c936a612 service nova] [instance: 58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37] Received event network-vif-plugged-86cb7370-5805-448b-bd4b-6204a2aa2a76 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1015.352702] env[62627]: DEBUG oslo_concurrency.lockutils [req-6b151bce-2399-49e7-a7d2-55e139e2e8e5 req-2b49f1cb-b66e-4b49-8671-6a51c936a612 service nova] Acquiring lock "58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1015.352855] env[62627]: DEBUG oslo_concurrency.lockutils [req-6b151bce-2399-49e7-a7d2-55e139e2e8e5 req-2b49f1cb-b66e-4b49-8671-6a51c936a612 service nova] Lock "58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1015.353107] env[62627]: DEBUG oslo_concurrency.lockutils [req-6b151bce-2399-49e7-a7d2-55e139e2e8e5 req-2b49f1cb-b66e-4b49-8671-6a51c936a612 service nova] Lock "58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1015.353377] env[62627]: DEBUG nova.compute.manager [req-6b151bce-2399-49e7-a7d2-55e139e2e8e5 req-2b49f1cb-b66e-4b49-8671-6a51c936a612 service nova] [instance: 58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37] No waiting events found dispatching network-vif-plugged-86cb7370-5805-448b-bd4b-6204a2aa2a76 
{{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1015.353624] env[62627]: WARNING nova.compute.manager [req-6b151bce-2399-49e7-a7d2-55e139e2e8e5 req-2b49f1cb-b66e-4b49-8671-6a51c936a612 service nova] [instance: 58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37] Received unexpected event network-vif-plugged-86cb7370-5805-448b-bd4b-6204a2aa2a76 for instance with vm_state building and task_state spawning. [ 1015.430293] env[62627]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1015.430293] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d905f6-bd83-e9ad-ec62-13ae5f13bbd2" [ 1015.430293] env[62627]: _type = "HttpNfcLease" [ 1015.430293] env[62627]: } is ready. {{(pid=62627) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1015.430999] env[62627]: DEBUG oslo_vmware.rw_handles [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1015.430999] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d905f6-bd83-e9ad-ec62-13ae5f13bbd2" [ 1015.430999] env[62627]: _type = "HttpNfcLease" [ 1015.430999] env[62627]: }. {{(pid=62627) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1015.435016] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccecf85e-9f6e-467b-841b-1c23ec07272d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.444042] env[62627]: DEBUG oslo_vmware.api [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194747, 'name': PowerOnVM_Task, 'duration_secs': 0.477547} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.447265] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1015.447478] env[62627]: INFO nova.compute.manager [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Took 7.57 seconds to spawn the instance on the hypervisor. [ 1015.447711] env[62627]: DEBUG nova.compute.manager [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1015.448062] env[62627]: DEBUG oslo_vmware.rw_handles [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b2b520-9964-d387-21f5-3a0b3b83b3b1/disk-0.vmdk from lease info. 
{{(pid=62627) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1015.448264] env[62627]: DEBUG oslo_vmware.rw_handles [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Creating HTTP connection to write to file with size = 21334016 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b2b520-9964-d387-21f5-3a0b3b83b3b1/disk-0.vmdk. {{(pid=62627) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1015.450037] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a82887b-311f-4dac-878f-be105f96ac77 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.511922] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: a12855e2-1544-4e85-a7ff-c026fec861b6] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1015.513852] env[62627]: DEBUG nova.network.neutron [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] [instance: 58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37] Successfully updated port: 86cb7370-5805-448b-bd4b-6204a2aa2a76 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1015.536706] env[62627]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-4ccb4ab2-7140-4b44-93db-8581ec0dfafe {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.883081] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-620f5d16-7e90-46b3-b6d3-d2983f6fe55b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.891720] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e9b04eb-91c5-4724-a71c-78fc03d09bb2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.933128] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46ce3773-398f-4cbe-8e49-8eb800c77a4d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.946827] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ceaa638-a1d7-44ab-aaa2-e34e5c1c7e8a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.968340] env[62627]: DEBUG nova.compute.provider_tree [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1016.020768] env[62627]: DEBUG oslo_concurrency.lockutils [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Acquiring lock 
"refresh_cache-58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1016.020980] env[62627]: DEBUG oslo_concurrency.lockutils [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Acquired lock "refresh_cache-58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1016.021179] env[62627]: DEBUG nova.network.neutron [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] [instance: 58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1016.022613] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 16c4065b-83cf-4b1c-92bf-66ce0a75e573] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1016.037777] env[62627]: INFO nova.compute.manager [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Took 40.54 seconds to build instance. [ 1016.336646] env[62627]: DEBUG oslo_vmware.rw_handles [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Completed reading data from the image iterator. {{(pid=62627) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1016.336901] env[62627]: DEBUG oslo_vmware.rw_handles [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b2b520-9964-d387-21f5-3a0b3b83b3b1/disk-0.vmdk. {{(pid=62627) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1016.337954] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1daa5acd-01d5-4b56-a2f0-fb15789e5f6e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.345637] env[62627]: DEBUG oslo_vmware.rw_handles [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b2b520-9964-d387-21f5-3a0b3b83b3b1/disk-0.vmdk is in state: ready. {{(pid=62627) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1016.345839] env[62627]: DEBUG oslo_vmware.rw_handles [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b2b520-9964-d387-21f5-3a0b3b83b3b1/disk-0.vmdk. 
{{(pid=62627) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1016.346233] env[62627]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-2435b0d8-8bb9-4827-8af7-8a3172e8fa6f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.475016] env[62627]: DEBUG nova.scheduler.client.report [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1016.521751] env[62627]: DEBUG oslo_vmware.rw_handles [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b2b520-9964-d387-21f5-3a0b3b83b3b1/disk-0.vmdk. {{(pid=62627) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1016.522263] env[62627]: INFO nova.virt.vmwareapi.images [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 7184a7d2-72c6-48f2-b125-880f1f9858c7] Downloaded image file data aeba0208-712f-4f27-adb0-2e5cef3d30a3 [ 1016.523156] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48f441e1-e096-4cdb-a484-d65c970490ae {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.544673] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: faa16f10-63f0-457c-8cd7-20a624454d86] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1016.546648] env[62627]: DEBUG oslo_concurrency.lockutils [None req-928bd9ac-1f58-4354-8977-49d97828f635 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "07b08283-cbe5-4c36-9127-c423b259643d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.056s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1016.546952] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1eba187a-6aa6-41a7-83c9-43eb6982213b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.583950] env[62627]: INFO nova.virt.vmwareapi.images [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 7184a7d2-72c6-48f2-b125-880f1f9858c7] The imported VM was unregistered [ 1016.586896] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 
tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 7184a7d2-72c6-48f2-b125-880f1f9858c7] Caching image {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1016.587161] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Creating directory with path [datastore2] devstack-image-cache_base/aeba0208-712f-4f27-adb0-2e5cef3d30a3 {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1016.587434] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c2a2fa4b-5d5e-4847-bd6f-6919ac083b5a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.599835] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Created directory with path [datastore2] devstack-image-cache_base/aeba0208-712f-4f27-adb0-2e5cef3d30a3 {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1016.600038] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_581d19cf-7963-4086-b30d-35756c0eb29b/OSTACK_IMG_581d19cf-7963-4086-b30d-35756c0eb29b.vmdk to [datastore2] devstack-image-cache_base/aeba0208-712f-4f27-adb0-2e5cef3d30a3/aeba0208-712f-4f27-adb0-2e5cef3d30a3.vmdk. {{(pid=62627) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1016.600295] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-fa9b67d2-f8c2-4c6c-917d-474ac2a61227 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.603608] env[62627]: DEBUG nova.network.neutron [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] [instance: 58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1016.610895] env[62627]: DEBUG oslo_vmware.api [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the task: (returnval){ [ 1016.610895] env[62627]: value = "task-2194749" [ 1016.610895] env[62627]: _type = "Task" [ 1016.610895] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.619870] env[62627]: DEBUG oslo_vmware.api [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194749, 'name': MoveVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.891360] env[62627]: DEBUG nova.network.neutron [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] [instance: 58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37] Updating instance_info_cache with network_info: [{"id": "86cb7370-5805-448b-bd4b-6204a2aa2a76", "address": "fa:16:3e:c2:4c:19", "network": {"id": "162ab8ae-2004-4822-ab11-008376eee11f", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-571800485-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d5dd3e3221f94bd38e8a9677fa91f98e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de5fcb06-b0d0-467f-86fe-06882165ac31", "external-id": "nsx-vlan-transportzone-595", "segmentation_id": 595, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap86cb7370-58", "ovs_interfaceid": "86cb7370-5805-448b-bd4b-6204a2aa2a76", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1016.982425] env[62627]: DEBUG oslo_concurrency.lockutils [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.396s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1016.982913] env[62627]: DEBUG nova.compute.manager [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: bd583f6b-be8d-4f02-a67e-0ea10e6dd087] Start building networks asynchronously for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1016.990360] env[62627]: DEBUG oslo_concurrency.lockutils [None req-20c04bbf-d22d-4883-bb4d-6e14ab399e3e tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.256s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1016.990360] env[62627]: DEBUG nova.objects.instance [None req-20c04bbf-d22d-4883-bb4d-6e14ab399e3e tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Lazy-loading 'resources' on Instance uuid c1fd6afb-9332-446d-a441-5942b60edc94 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1017.049623] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 2cfcbc3b-c2b9-4155-a48a-0e73eea0589e] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1017.124846] env[62627]: DEBUG oslo_vmware.api [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194749, 'name': MoveVirtualDisk_Task} progress is 21%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.256231] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquiring lock "07b08283-cbe5-4c36-9127-c423b259643d" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1017.256550] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "07b08283-cbe5-4c36-9127-c423b259643d" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1017.256969] env[62627]: INFO nova.compute.manager [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Shelving [ 1017.380115] env[62627]: DEBUG nova.compute.manager [req-44ef6f4a-ddfa-4e41-b41c-7bebb663c08f req-55f1e3ee-0ea3-44be-9cd0-0aa2e9c419a9 service nova] [instance: 58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37] Received event network-changed-86cb7370-5805-448b-bd4b-6204a2aa2a76 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1017.380349] env[62627]: DEBUG nova.compute.manager [req-44ef6f4a-ddfa-4e41-b41c-7bebb663c08f req-55f1e3ee-0ea3-44be-9cd0-0aa2e9c419a9 service nova] [instance: 58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37] Refreshing instance network info cache due to event network-changed-86cb7370-5805-448b-bd4b-6204a2aa2a76. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1017.380866] env[62627]: DEBUG oslo_concurrency.lockutils [req-44ef6f4a-ddfa-4e41-b41c-7bebb663c08f req-55f1e3ee-0ea3-44be-9cd0-0aa2e9c419a9 service nova] Acquiring lock "refresh_cache-58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1017.394857] env[62627]: DEBUG oslo_concurrency.lockutils [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Releasing lock "refresh_cache-58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1017.394857] env[62627]: DEBUG nova.compute.manager [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] [instance: 58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37] Instance network_info: |[{"id": "86cb7370-5805-448b-bd4b-6204a2aa2a76", "address": "fa:16:3e:c2:4c:19", "network": {"id": "162ab8ae-2004-4822-ab11-008376eee11f", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-571800485-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d5dd3e3221f94bd38e8a9677fa91f98e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de5fcb06-b0d0-467f-86fe-06882165ac31", "external-id": "nsx-vlan-transportzone-595", "segmentation_id": 595, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap86cb7370-58", "ovs_interfaceid": "86cb7370-5805-448b-bd4b-6204a2aa2a76", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1017.394857] env[62627]: DEBUG oslo_concurrency.lockutils [req-44ef6f4a-ddfa-4e41-b41c-7bebb663c08f req-55f1e3ee-0ea3-44be-9cd0-0aa2e9c419a9 service nova] Acquired lock "refresh_cache-58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1017.397244] env[62627]: DEBUG nova.network.neutron [req-44ef6f4a-ddfa-4e41-b41c-7bebb663c08f req-55f1e3ee-0ea3-44be-9cd0-0aa2e9c419a9 service nova] [instance: 58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37] Refreshing network info cache for port 86cb7370-5805-448b-bd4b-6204a2aa2a76 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1017.397244] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] [instance: 58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c2:4c:19', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'de5fcb06-b0d0-467f-86fe-06882165ac31', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'86cb7370-5805-448b-bd4b-6204a2aa2a76', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1017.404341] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Creating folder: Project (d5dd3e3221f94bd38e8a9677fa91f98e). Parent ref: group-v447541. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1017.407791] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-620c3ede-5e11-46f6-8006-3fe69c7c1a2d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.423364] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Created folder: Project (d5dd3e3221f94bd38e8a9677fa91f98e) in parent group-v447541. [ 1017.423589] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Creating folder: Instances. Parent ref: group-v447731. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1017.424654] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-519400ce-f8a2-42ff-a714-37465fc1d114 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.440318] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Created folder: Instances in parent group-v447731. [ 1017.440643] env[62627]: DEBUG oslo.service.loopingcall [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1017.440740] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1017.442156] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c38e24db-c049-41ec-b269-398c2e277ea3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.470344] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1017.470344] env[62627]: value = "task-2194752" [ 1017.470344] env[62627]: _type = "Task" [ 1017.470344] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.483409] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194752, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.494569] env[62627]: DEBUG nova.compute.utils [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1017.500253] env[62627]: DEBUG nova.compute.manager [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: bd583f6b-be8d-4f02-a67e-0ea10e6dd087] Allocating IP information in the background. {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1017.500339] env[62627]: DEBUG nova.network.neutron [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: bd583f6b-be8d-4f02-a67e-0ea10e6dd087] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1017.555959] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 83998771-97a7-4458-834e-df1e4b2914ee] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1017.627030] env[62627]: DEBUG oslo_vmware.api [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194749, 'name': MoveVirtualDisk_Task} progress is 40%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.628864] env[62627]: DEBUG nova.policy [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '991f6f2c63da4e5cb3aff0dabe1d5968', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2e06a7550ef84edd9861cafa0bfcb857', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 1017.859833] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0912491-8a52-4afd-bd8b-6da0dfeec586 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.873535] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aa3750e-5926-4296-9c5f-a50c315df280 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.919147] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17a97873-5032-4fe4-b23a-c793cd20a976 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.931169] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-a5c5baf3-5d7f-4275-bb2e-9f6f5cc0ed84 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.957541] env[62627]: DEBUG nova.compute.provider_tree [None req-20c04bbf-d22d-4883-bb4d-6e14ab399e3e tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1017.979374] env[62627]: DEBUG nova.network.neutron [req-44ef6f4a-ddfa-4e41-b41c-7bebb663c08f req-55f1e3ee-0ea3-44be-9cd0-0aa2e9c419a9 service nova] [instance: 58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37] Updated VIF entry in instance network info cache for port 86cb7370-5805-448b-bd4b-6204a2aa2a76. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1017.981340] env[62627]: DEBUG nova.network.neutron [req-44ef6f4a-ddfa-4e41-b41c-7bebb663c08f req-55f1e3ee-0ea3-44be-9cd0-0aa2e9c419a9 service nova] [instance: 58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37] Updating instance_info_cache with network_info: [{"id": "86cb7370-5805-448b-bd4b-6204a2aa2a76", "address": "fa:16:3e:c2:4c:19", "network": {"id": "162ab8ae-2004-4822-ab11-008376eee11f", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-571800485-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d5dd3e3221f94bd38e8a9677fa91f98e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de5fcb06-b0d0-467f-86fe-06882165ac31", "external-id": "nsx-vlan-transportzone-595", "segmentation_id": 595, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap86cb7370-58", "ovs_interfaceid": "86cb7370-5805-448b-bd4b-6204a2aa2a76", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1017.990261] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194752, 'name': CreateVM_Task} progress is 25%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.000329] env[62627]: DEBUG nova.compute.manager [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: bd583f6b-be8d-4f02-a67e-0ea10e6dd087] Start building block device mappings for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1018.059814] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: fffa23fe-3ce2-4879-b72d-11275d298504] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1018.128019] env[62627]: DEBUG oslo_vmware.api [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194749, 'name': MoveVirtualDisk_Task} progress is 60%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.271303] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1018.271631] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3695426e-1bb0-4dbd-a697-bd7bcddd4e6d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.283176] env[62627]: DEBUG oslo_vmware.api [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 1018.283176] env[62627]: value = "task-2194753" [ 1018.283176] env[62627]: _type = "Task" [ 1018.283176] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.298360] env[62627]: DEBUG oslo_vmware.api [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194753, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.387221] env[62627]: DEBUG nova.network.neutron [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: bd583f6b-be8d-4f02-a67e-0ea10e6dd087] Successfully created port: 1dbb6dda-f003-4949-8bd8-ae4bd148a135 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1018.460162] env[62627]: DEBUG nova.scheduler.client.report [None req-20c04bbf-d22d-4883-bb4d-6e14ab399e3e tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1018.488291] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194752, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.491844] env[62627]: DEBUG oslo_concurrency.lockutils [req-44ef6f4a-ddfa-4e41-b41c-7bebb663c08f req-55f1e3ee-0ea3-44be-9cd0-0aa2e9c419a9 service nova] Releasing lock "refresh_cache-58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1018.567725] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 76fb58dc-df20-464b-bda0-dcf09d69c718] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1018.628056] env[62627]: DEBUG oslo_vmware.api [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194749, 'name': MoveVirtualDisk_Task} progress is 80%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.797102] env[62627]: DEBUG oslo_vmware.api [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194753, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.967102] env[62627]: DEBUG oslo_concurrency.lockutils [None req-20c04bbf-d22d-4883-bb4d-6e14ab399e3e tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.978s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1018.971198] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9e5840cb-5be2-4b36-8e58-0c5896dc6dc5 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.985s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1018.971486] env[62627]: DEBUG nova.objects.instance [None req-9e5840cb-5be2-4b36-8e58-0c5896dc6dc5 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Lazy-loading 'resources' on Instance uuid 9adb2baf-315f-4122-a657-cb99757002ba {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1018.989152] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194752, 'name': CreateVM_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.000220] env[62627]: INFO nova.scheduler.client.report [None req-20c04bbf-d22d-4883-bb4d-6e14ab399e3e tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Deleted allocations for instance c1fd6afb-9332-446d-a441-5942b60edc94 [ 1019.009678] env[62627]: DEBUG nova.compute.manager [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: bd583f6b-be8d-4f02-a67e-0ea10e6dd087] Start spawning the instance on the hypervisor. 
{{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1019.076199] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: f09c685d-9103-4c94-a44a-266aacc334f3] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1019.129070] env[62627]: DEBUG oslo_vmware.api [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194749, 'name': MoveVirtualDisk_Task} progress is 100%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.152129] env[62627]: DEBUG nova.virt.hardware [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1019.152385] env[62627]: DEBUG nova.virt.hardware [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1019.152539] env[62627]: DEBUG nova.virt.hardware [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1019.152714] env[62627]: DEBUG nova.virt.hardware [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1019.152854] env[62627]: DEBUG nova.virt.hardware [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1019.152995] env[62627]: DEBUG nova.virt.hardware [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1019.153215] env[62627]: DEBUG 
nova.virt.hardware [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1019.153382] env[62627]: DEBUG nova.virt.hardware [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1019.153546] env[62627]: DEBUG nova.virt.hardware [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1019.153706] env[62627]: DEBUG nova.virt.hardware [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1019.153870] env[62627]: DEBUG nova.virt.hardware [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1019.154785] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-693f5a3c-2cd7-4d26-9ae8-ca310f735289 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.163807] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84b5e5ad-a3be-4e2e-a9d6-1162f613db18 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.297282] env[62627]: DEBUG oslo_vmware.api [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194753, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.484610] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194752, 'name': CreateVM_Task, 'duration_secs': 1.74409} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.484791] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1019.485522] env[62627]: DEBUG oslo_concurrency.lockutils [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1019.485686] env[62627]: DEBUG oslo_concurrency.lockutils [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1019.486016] env[62627]: DEBUG oslo_concurrency.lockutils [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1019.486437] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b062507-a940-41df-bdab-acc9e74fe52f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.491932] env[62627]: DEBUG oslo_vmware.api [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Waiting for the task: (returnval){ [ 1019.491932] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52dde3cd-851f-77d8-b607-f2ad85d97987" [ 1019.491932] env[62627]: _type = "Task" [ 1019.491932] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.501109] env[62627]: DEBUG oslo_vmware.api [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52dde3cd-851f-77d8-b607-f2ad85d97987, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.507456] env[62627]: DEBUG oslo_concurrency.lockutils [None req-20c04bbf-d22d-4883-bb4d-6e14ab399e3e tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Lock "c1fd6afb-9332-446d-a441-5942b60edc94" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.310s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1019.583504] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 15799515-ab7d-4e7c-a6e0-c740baf4f93a] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1019.628179] env[62627]: DEBUG oslo_vmware.api [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194749, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.521834} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.628442] env[62627]: INFO nova.virt.vmwareapi.ds_util [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_581d19cf-7963-4086-b30d-35756c0eb29b/OSTACK_IMG_581d19cf-7963-4086-b30d-35756c0eb29b.vmdk to [datastore2] devstack-image-cache_base/aeba0208-712f-4f27-adb0-2e5cef3d30a3/aeba0208-712f-4f27-adb0-2e5cef3d30a3.vmdk. [ 1019.628626] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 7184a7d2-72c6-48f2-b125-880f1f9858c7] Cleaning up location [datastore2] OSTACK_IMG_581d19cf-7963-4086-b30d-35756c0eb29b {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1019.628782] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_581d19cf-7963-4086-b30d-35756c0eb29b {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1019.629045] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-55d33663-4fe0-43c8-9640-753afa56be0a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.637436] env[62627]: DEBUG oslo_vmware.api [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the task: (returnval){ [ 1019.637436] env[62627]: value = "task-2194754" [ 1019.637436] env[62627]: _type = "Task" [ 1019.637436] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.647826] env[62627]: DEBUG oslo_vmware.api [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194754, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.761220] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-836b2bea-d416-44ff-8886-544b4e8238bc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.769025] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0f4cacd-0f4d-48c3-8113-47f2fdd8d93d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.805763] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54128cb1-544c-4b08-ab63-48881d17b146 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.814748] env[62627]: DEBUG oslo_vmware.api [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194753, 'name': PowerOffVM_Task, 'duration_secs': 1.026475} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.817020] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1019.821411] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13c261b9-d428-4226-805e-65944237f5a2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.823306] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75d0100e-c121-4738-b1f0-2b1a40d29f01 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.838639] env[62627]: DEBUG nova.compute.provider_tree [None req-9e5840cb-5be2-4b36-8e58-0c5896dc6dc5 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1019.854491] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5a2edeb-4245-4973-a9ae-57679ce5fd2b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.868018] env[62627]: DEBUG oslo_concurrency.lockutils [None req-42a3e783-672b-4d5a-97e5-15db9d0f6ba5 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Acquiring lock "7366e4b0-bdba-435d-844d-8e81c4464b31" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1019.868018] env[62627]: DEBUG oslo_concurrency.lockutils [None req-42a3e783-672b-4d5a-97e5-15db9d0f6ba5 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Lock 
"7366e4b0-bdba-435d-844d-8e81c4464b31" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1019.868018] env[62627]: DEBUG oslo_concurrency.lockutils [None req-42a3e783-672b-4d5a-97e5-15db9d0f6ba5 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Acquiring lock "7366e4b0-bdba-435d-844d-8e81c4464b31-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1019.868018] env[62627]: DEBUG oslo_concurrency.lockutils [None req-42a3e783-672b-4d5a-97e5-15db9d0f6ba5 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Lock "7366e4b0-bdba-435d-844d-8e81c4464b31-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1019.868018] env[62627]: DEBUG oslo_concurrency.lockutils [None req-42a3e783-672b-4d5a-97e5-15db9d0f6ba5 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Lock "7366e4b0-bdba-435d-844d-8e81c4464b31-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1019.868018] env[62627]: INFO nova.compute.manager [None req-42a3e783-672b-4d5a-97e5-15db9d0f6ba5 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Terminating instance [ 1020.003085] env[62627]: DEBUG oslo_vmware.api [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52dde3cd-851f-77d8-b607-f2ad85d97987, 'name': SearchDatastore_Task, 'duration_secs': 0.027964} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.003417] env[62627]: DEBUG oslo_concurrency.lockutils [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1020.003653] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] [instance: 58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1020.003884] env[62627]: DEBUG oslo_concurrency.lockutils [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1020.004041] env[62627]: DEBUG oslo_concurrency.lockutils [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1020.004227] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1020.004484] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ba6c680a-d642-4d03-855b-f3f69124b148 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.013981] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1020.014175] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1020.014888] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-14076da3-e64a-44ca-a061-4094a5eed938 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.020755] env[62627]: DEBUG oslo_vmware.api [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Waiting for the task: (returnval){ [ 1020.020755] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]524a9f10-6008-fd78-2ac6-0cad94197c9d" [ 1020.020755] env[62627]: _type = "Task" [ 1020.020755] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.027977] env[62627]: DEBUG nova.compute.manager [req-097ce166-c573-4180-8121-3dee0c5ba3c7 req-334ad9fd-e80a-4a59-843b-4a0d8240d455 service nova] [instance: bd583f6b-be8d-4f02-a67e-0ea10e6dd087] Received event network-vif-plugged-1dbb6dda-f003-4949-8bd8-ae4bd148a135 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1020.028238] env[62627]: DEBUG oslo_concurrency.lockutils [req-097ce166-c573-4180-8121-3dee0c5ba3c7 req-334ad9fd-e80a-4a59-843b-4a0d8240d455 service nova] Acquiring lock "bd583f6b-be8d-4f02-a67e-0ea10e6dd087-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1020.028467] env[62627]: DEBUG oslo_concurrency.lockutils [req-097ce166-c573-4180-8121-3dee0c5ba3c7 req-334ad9fd-e80a-4a59-843b-4a0d8240d455 service nova] Lock "bd583f6b-be8d-4f02-a67e-0ea10e6dd087-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1020.028631] env[62627]: DEBUG oslo_concurrency.lockutils [req-097ce166-c573-4180-8121-3dee0c5ba3c7 req-334ad9fd-e80a-4a59-843b-4a0d8240d455 service nova] Lock "bd583f6b-be8d-4f02-a67e-0ea10e6dd087-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1020.028801] env[62627]: DEBUG nova.compute.manager [req-097ce166-c573-4180-8121-3dee0c5ba3c7 req-334ad9fd-e80a-4a59-843b-4a0d8240d455 service nova] [instance: bd583f6b-be8d-4f02-a67e-0ea10e6dd087] No waiting events found dispatching network-vif-plugged-1dbb6dda-f003-4949-8bd8-ae4bd148a135 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1020.028963] env[62627]: WARNING nova.compute.manager [req-097ce166-c573-4180-8121-3dee0c5ba3c7 req-334ad9fd-e80a-4a59-843b-4a0d8240d455 service nova] [instance: bd583f6b-be8d-4f02-a67e-0ea10e6dd087] Received unexpected event network-vif-plugged-1dbb6dda-f003-4949-8bd8-ae4bd148a135 for instance with vm_state building and task_state spawning. 
[ 1020.034992] env[62627]: DEBUG oslo_vmware.api [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]524a9f10-6008-fd78-2ac6-0cad94197c9d, 'name': SearchDatastore_Task, 'duration_secs': 0.010461} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.040695] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d43a68ec-aedd-4b88-b845-767e955c161d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.046379] env[62627]: DEBUG oslo_vmware.api [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Waiting for the task: (returnval){ [ 1020.046379] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5284c6a9-d797-ce5e-2a26-ff5219094828" [ 1020.046379] env[62627]: _type = "Task" [ 1020.046379] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.054747] env[62627]: DEBUG oslo_vmware.api [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5284c6a9-d797-ce5e-2a26-ff5219094828, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.068621] env[62627]: DEBUG nova.network.neutron [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: bd583f6b-be8d-4f02-a67e-0ea10e6dd087] Successfully updated port: 1dbb6dda-f003-4949-8bd8-ae4bd148a135 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1020.090521] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: e5712962-ad8e-4cf2-aa47-275293609a13] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1020.149843] env[62627]: DEBUG oslo_vmware.api [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194754, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.152138} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.150120] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1020.150289] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Releasing lock "[datastore2] devstack-image-cache_base/aeba0208-712f-4f27-adb0-2e5cef3d30a3/aeba0208-712f-4f27-adb0-2e5cef3d30a3.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1020.150525] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/aeba0208-712f-4f27-adb0-2e5cef3d30a3/aeba0208-712f-4f27-adb0-2e5cef3d30a3.vmdk to [datastore2] 7184a7d2-72c6-48f2-b125-880f1f9858c7/7184a7d2-72c6-48f2-b125-880f1f9858c7.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1020.150778] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e2e53151-86d1-4878-9136-2c6d33692854 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.159067] env[62627]: DEBUG oslo_vmware.api [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the task: (returnval){ [ 1020.159067] env[62627]: value = "task-2194755" [ 1020.159067] env[62627]: _type = "Task" [ 1020.159067] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.167665] env[62627]: DEBUG oslo_vmware.api [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194755, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.354646] env[62627]: DEBUG nova.scheduler.client.report [None req-9e5840cb-5be2-4b36-8e58-0c5896dc6dc5 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1020.369811] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Creating Snapshot of the VM instance {{(pid=62627) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1020.370212] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-b4571710-d5d0-4b5b-a0e7-e70acf667e3c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.373075] env[62627]: DEBUG nova.compute.manager [None req-42a3e783-672b-4d5a-97e5-15db9d0f6ba5 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Start destroying the instance on the hypervisor. {{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1020.374613] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-42a3e783-672b-4d5a-97e5-15db9d0f6ba5 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1020.374613] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-349366d0-16ff-4afe-b838-cbff5c6aa4c9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.383911] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-42a3e783-672b-4d5a-97e5-15db9d0f6ba5 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1020.385350] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e023071a-c62b-4681-9d62-ca0a1bb68e4b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.387107] env[62627]: DEBUG oslo_vmware.api [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 1020.387107] env[62627]: value = "task-2194756" [ 1020.387107] env[62627]: _type = "Task" [ 1020.387107] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.397234] env[62627]: DEBUG oslo_vmware.api [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194756, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.398730] env[62627]: DEBUG oslo_vmware.api [None req-42a3e783-672b-4d5a-97e5-15db9d0f6ba5 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for the task: (returnval){ [ 1020.398730] env[62627]: value = "task-2194757" [ 1020.398730] env[62627]: _type = "Task" [ 1020.398730] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.408334] env[62627]: DEBUG oslo_vmware.api [None req-42a3e783-672b-4d5a-97e5-15db9d0f6ba5 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194757, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.558103] env[62627]: DEBUG oslo_vmware.api [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5284c6a9-d797-ce5e-2a26-ff5219094828, 'name': SearchDatastore_Task, 'duration_secs': 0.010049} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.558433] env[62627]: DEBUG oslo_concurrency.lockutils [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1020.558717] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37/58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1020.558990] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-64951bf7-2d7b-4287-8d5c-a440765343ec {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.569584] env[62627]: DEBUG oslo_vmware.api [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Waiting for the task: (returnval){ [ 1020.569584] env[62627]: value = "task-2194758" [ 1020.569584] env[62627]: _type = "Task" [ 1020.569584] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.573820] env[62627]: DEBUG oslo_concurrency.lockutils [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Acquiring lock "refresh_cache-bd583f6b-be8d-4f02-a67e-0ea10e6dd087" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1020.574039] env[62627]: DEBUG oslo_concurrency.lockutils [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Acquired lock "refresh_cache-bd583f6b-be8d-4f02-a67e-0ea10e6dd087" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1020.574199] env[62627]: DEBUG nova.network.neutron [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: bd583f6b-be8d-4f02-a67e-0ea10e6dd087] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1020.582372] env[62627]: DEBUG oslo_vmware.api [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Task: {'id': task-2194758, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.592938] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: f4be6a21-4f4c-4ec7-89ad-5ae2be2389d7] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1020.670647] env[62627]: DEBUG oslo_vmware.api [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194755, 'name': CopyVirtualDisk_Task} progress is 21%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.861592] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9e5840cb-5be2-4b36-8e58-0c5896dc6dc5 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.890s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1020.864203] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ae6b1cae-0174-4f2d-9eca-bbb146ac4472 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.770s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1020.864350] env[62627]: DEBUG nova.objects.instance [None req-ae6b1cae-0174-4f2d-9eca-bbb146ac4472 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Lazy-loading 'resources' on Instance uuid ab999825-4310-4fd8-81f0-6bb5dde4175b {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1020.886974] env[62627]: INFO nova.scheduler.client.report [None req-9e5840cb-5be2-4b36-8e58-0c5896dc6dc5 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Deleted allocations for instance 9adb2baf-315f-4122-a657-cb99757002ba [ 1020.901460] env[62627]: DEBUG oslo_vmware.api [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194756, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.911572] env[62627]: DEBUG oslo_vmware.api [None req-42a3e783-672b-4d5a-97e5-15db9d0f6ba5 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194757, 'name': PowerOffVM_Task, 'duration_secs': 0.227626} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.911890] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-42a3e783-672b-4d5a-97e5-15db9d0f6ba5 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1020.912100] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-42a3e783-672b-4d5a-97e5-15db9d0f6ba5 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1020.913069] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bd2e3bd9-5424-406f-8be2-2bba88dc8fe2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.987781] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-42a3e783-672b-4d5a-97e5-15db9d0f6ba5 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1020.988041] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-42a3e783-672b-4d5a-97e5-15db9d0f6ba5 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Deleting contents of the VM from datastore datastore1 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1020.988685] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-42a3e783-672b-4d5a-97e5-15db9d0f6ba5 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Deleting the datastore file [datastore1] 7366e4b0-bdba-435d-844d-8e81c4464b31 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1020.988685] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fc83924b-91db-40c2-b912-2695967e32dd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.998368] env[62627]: DEBUG oslo_vmware.api [None req-42a3e783-672b-4d5a-97e5-15db9d0f6ba5 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for the task: (returnval){ [ 1020.998368] env[62627]: value = "task-2194760" [ 1020.998368] env[62627]: _type = "Task" [ 1020.998368] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.009800] env[62627]: DEBUG oslo_vmware.api [None req-42a3e783-672b-4d5a-97e5-15db9d0f6ba5 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194760, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.083030] env[62627]: DEBUG oslo_vmware.api [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Task: {'id': task-2194758, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.098143] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 7691c2e2-4e43-4990-bf4d-2ef3298a4264] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1021.137031] env[62627]: DEBUG nova.network.neutron [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: bd583f6b-be8d-4f02-a67e-0ea10e6dd087] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1021.174601] env[62627]: DEBUG oslo_vmware.api [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194755, 'name': CopyVirtualDisk_Task} progress is 43%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.369307] env[62627]: DEBUG nova.network.neutron [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: bd583f6b-be8d-4f02-a67e-0ea10e6dd087] Updating instance_info_cache with network_info: [{"id": "1dbb6dda-f003-4949-8bd8-ae4bd148a135", "address": "fa:16:3e:ee:3f:26", "network": {"id": "c8f2b47a-fb3f-4fb5-a55a-d527bd4b6f03", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-2107012038-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2e06a7550ef84edd9861cafa0bfcb857", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3117b312-701b-4439-b197-96b6c5cdca89", "external-id": "nsx-vlan-transportzone-94", "segmentation_id": 94, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1dbb6dda-f0", "ovs_interfaceid": "1dbb6dda-f003-4949-8bd8-ae4bd148a135", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1021.404011] env[62627]: DEBUG oslo_vmware.api [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194756, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.405068] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9e5840cb-5be2-4b36-8e58-0c5896dc6dc5 tempest-ServersNegativeTestMultiTenantJSON-782874356 tempest-ServersNegativeTestMultiTenantJSON-782874356-project-member] Lock "9adb2baf-315f-4122-a657-cb99757002ba" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.171s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1021.512049] env[62627]: DEBUG oslo_vmware.api [None req-42a3e783-672b-4d5a-97e5-15db9d0f6ba5 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Task: {'id': task-2194760, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.245703} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.512256] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-42a3e783-672b-4d5a-97e5-15db9d0f6ba5 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1021.512482] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-42a3e783-672b-4d5a-97e5-15db9d0f6ba5 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Deleted contents of the VM from datastore datastore1 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1021.512683] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-42a3e783-672b-4d5a-97e5-15db9d0f6ba5 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1021.512885] env[62627]: INFO nova.compute.manager [None req-42a3e783-672b-4d5a-97e5-15db9d0f6ba5 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1021.513169] env[62627]: DEBUG oslo.service.loopingcall [None req-42a3e783-672b-4d5a-97e5-15db9d0f6ba5 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1021.513455] env[62627]: DEBUG nova.compute.manager [-] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1021.513553] env[62627]: DEBUG nova.network.neutron [-] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1021.583252] env[62627]: DEBUG oslo_vmware.api [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Task: {'id': task-2194758, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.599780] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 63e884c4-2cc1-431d-8a7f-1c4259b29e1a] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1021.655895] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1792c90b-4c0a-4d62-a4fe-2ad0b7b33ddf {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.668284] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dede7e1-c523-419e-888b-caaa9ca410f0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.674984] env[62627]: DEBUG oslo_vmware.api [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194755, 'name': CopyVirtualDisk_Task} progress is 63%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.706946] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c0f56f3-2a21-4050-b859-d2a3dc979b5c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.725520] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15935f97-f797-40fa-b087-64ad2764e4a6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.741258] env[62627]: DEBUG nova.compute.provider_tree [None req-ae6b1cae-0174-4f2d-9eca-bbb146ac4472 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1021.875666] env[62627]: DEBUG oslo_concurrency.lockutils [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Releasing lock "refresh_cache-bd583f6b-be8d-4f02-a67e-0ea10e6dd087" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1021.876087] env[62627]: DEBUG nova.compute.manager [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: bd583f6b-be8d-4f02-a67e-0ea10e6dd087] Instance network_info: |[{"id": "1dbb6dda-f003-4949-8bd8-ae4bd148a135", "address": "fa:16:3e:ee:3f:26", "network": {"id": "c8f2b47a-fb3f-4fb5-a55a-d527bd4b6f03", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-2107012038-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2e06a7550ef84edd9861cafa0bfcb857", "mtu": 8950, 
"physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3117b312-701b-4439-b197-96b6c5cdca89", "external-id": "nsx-vlan-transportzone-94", "segmentation_id": 94, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1dbb6dda-f0", "ovs_interfaceid": "1dbb6dda-f003-4949-8bd8-ae4bd148a135", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1021.876683] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: bd583f6b-be8d-4f02-a67e-0ea10e6dd087] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ee:3f:26', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3117b312-701b-4439-b197-96b6c5cdca89', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1dbb6dda-f003-4949-8bd8-ae4bd148a135', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1021.885704] env[62627]: DEBUG oslo.service.loopingcall [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1021.886404] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bd583f6b-be8d-4f02-a67e-0ea10e6dd087] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1021.886766] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5dc919c8-a76e-4db4-b08b-04c1bc33d591 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.918426] env[62627]: DEBUG oslo_vmware.api [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194756, 'name': CreateSnapshot_Task, 'duration_secs': 1.483564} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.918681] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1021.918681] env[62627]: value = "task-2194761" [ 1021.918681] env[62627]: _type = "Task" [ 1021.918681] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.919288] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Created Snapshot of the VM instance {{(pid=62627) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1021.919915] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0b6ee45-9eda-4d48-bcb2-535bec622cf6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.940685] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194761, 'name': CreateVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.086561] env[62627]: DEBUG oslo_vmware.api [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Task: {'id': task-2194758, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.105354] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: e8c87637-4a30-4482-af21-f58ad8a218fc] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1022.110981] env[62627]: DEBUG nova.compute.manager [req-1724e486-cf85-4e15-beca-cf1efeeadd9c req-6e69176f-386d-4910-8ab6-85ca07466d8f service nova] [instance: bd583f6b-be8d-4f02-a67e-0ea10e6dd087] Received event network-changed-1dbb6dda-f003-4949-8bd8-ae4bd148a135 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1022.111465] env[62627]: DEBUG nova.compute.manager [req-1724e486-cf85-4e15-beca-cf1efeeadd9c req-6e69176f-386d-4910-8ab6-85ca07466d8f service nova] [instance: bd583f6b-be8d-4f02-a67e-0ea10e6dd087] Refreshing instance network info cache due to event network-changed-1dbb6dda-f003-4949-8bd8-ae4bd148a135. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1022.111546] env[62627]: DEBUG oslo_concurrency.lockutils [req-1724e486-cf85-4e15-beca-cf1efeeadd9c req-6e69176f-386d-4910-8ab6-85ca07466d8f service nova] Acquiring lock "refresh_cache-bd583f6b-be8d-4f02-a67e-0ea10e6dd087" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1022.113115] env[62627]: DEBUG oslo_concurrency.lockutils [req-1724e486-cf85-4e15-beca-cf1efeeadd9c req-6e69176f-386d-4910-8ab6-85ca07466d8f service nova] Acquired lock "refresh_cache-bd583f6b-be8d-4f02-a67e-0ea10e6dd087" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1022.113115] env[62627]: DEBUG nova.network.neutron [req-1724e486-cf85-4e15-beca-cf1efeeadd9c req-6e69176f-386d-4910-8ab6-85ca07466d8f service nova] [instance: bd583f6b-be8d-4f02-a67e-0ea10e6dd087] Refreshing network info cache for port 1dbb6dda-f003-4949-8bd8-ae4bd148a135 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1022.179358] env[62627]: DEBUG oslo_vmware.api [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194755, 'name': CopyVirtualDisk_Task} progress is 85%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.244521] env[62627]: DEBUG nova.scheduler.client.report [None req-ae6b1cae-0174-4f2d-9eca-bbb146ac4472 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1022.432327] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194761, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.451575] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Creating linked-clone VM from snapshot {{(pid=62627) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1022.451950] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-9694d735-6d71-4c39-93ed-6fb6afc7f244 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.458898] env[62627]: DEBUG nova.network.neutron [-] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1022.465619] env[62627]: DEBUG oslo_vmware.api [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 1022.465619] env[62627]: value = "task-2194762" [ 1022.465619] env[62627]: _type = "Task" [ 1022.465619] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.478440] env[62627]: DEBUG oslo_vmware.api [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194762, 'name': CloneVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.583074] env[62627]: DEBUG oslo_vmware.api [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Task: {'id': task-2194758, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.611721] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 33bf3932-3746-445b-b0fe-611466cdd9b1] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1022.676174] env[62627]: DEBUG oslo_vmware.api [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194755, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.416321} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.676526] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/aeba0208-712f-4f27-adb0-2e5cef3d30a3/aeba0208-712f-4f27-adb0-2e5cef3d30a3.vmdk to [datastore2] 7184a7d2-72c6-48f2-b125-880f1f9858c7/7184a7d2-72c6-48f2-b125-880f1f9858c7.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1022.677404] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deb7ba84-7278-4b41-af6d-000385109e8d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.710884] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 7184a7d2-72c6-48f2-b125-880f1f9858c7] Reconfiguring VM instance instance-00000045 to attach disk [datastore2] 7184a7d2-72c6-48f2-b125-880f1f9858c7/7184a7d2-72c6-48f2-b125-880f1f9858c7.vmdk or device None with type streamOptimized {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1022.711267] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b89ecf03-4db9-40d4-919f-4ab381cf3e9d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.740136] env[62627]: DEBUG oslo_vmware.api [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the task: (returnval){ [ 1022.740136] env[62627]: value = "task-2194763" [ 1022.740136] env[62627]: _type = "Task" [ 1022.740136] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.752635] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ae6b1cae-0174-4f2d-9eca-bbb146ac4472 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.888s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1022.758024] env[62627]: DEBUG oslo_vmware.api [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194763, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.795174] env[62627]: INFO nova.scheduler.client.report [None req-ae6b1cae-0174-4f2d-9eca-bbb146ac4472 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Deleted allocations for instance ab999825-4310-4fd8-81f0-6bb5dde4175b [ 1022.937443] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194761, 'name': CreateVM_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.962662] env[62627]: INFO nova.compute.manager [-] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Took 1.45 seconds to deallocate network for instance. 
[ 1022.983588] env[62627]: DEBUG oslo_vmware.api [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194762, 'name': CloneVM_Task} progress is 94%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.087428] env[62627]: DEBUG oslo_vmware.api [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Task: {'id': task-2194758, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.25318} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.087711] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37/58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1023.087919] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] [instance: 58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1023.088189] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f01620e7-42ef-4b12-bc3e-49aa2696e92e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.095526] env[62627]: DEBUG oslo_vmware.api [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Waiting for the task: (returnval){ [ 1023.095526] env[62627]: value = "task-2194764" [ 1023.095526] env[62627]: _type = "Task" [ 1023.095526] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.109092] env[62627]: DEBUG oslo_vmware.api [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Task: {'id': task-2194764, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.114791] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: db8ddb7b-b119-415c-98d9-dc5b3d2868b7] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1023.214497] env[62627]: DEBUG nova.network.neutron [req-1724e486-cf85-4e15-beca-cf1efeeadd9c req-6e69176f-386d-4910-8ab6-85ca07466d8f service nova] [instance: bd583f6b-be8d-4f02-a67e-0ea10e6dd087] Updated VIF entry in instance network info cache for port 1dbb6dda-f003-4949-8bd8-ae4bd148a135. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1023.214917] env[62627]: DEBUG nova.network.neutron [req-1724e486-cf85-4e15-beca-cf1efeeadd9c req-6e69176f-386d-4910-8ab6-85ca07466d8f service nova] [instance: bd583f6b-be8d-4f02-a67e-0ea10e6dd087] Updating instance_info_cache with network_info: [{"id": "1dbb6dda-f003-4949-8bd8-ae4bd148a135", "address": "fa:16:3e:ee:3f:26", "network": {"id": "c8f2b47a-fb3f-4fb5-a55a-d527bd4b6f03", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-2107012038-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2e06a7550ef84edd9861cafa0bfcb857", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3117b312-701b-4439-b197-96b6c5cdca89", "external-id": "nsx-vlan-transportzone-94", "segmentation_id": 94, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1dbb6dda-f0", "ovs_interfaceid": "1dbb6dda-f003-4949-8bd8-ae4bd148a135", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1023.250477] env[62627]: DEBUG oslo_vmware.api [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194763, 'name': ReconfigVM_Task, 'duration_secs': 0.343275} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.250754] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 7184a7d2-72c6-48f2-b125-880f1f9858c7] Reconfigured VM instance instance-00000045 to attach disk [datastore2] 7184a7d2-72c6-48f2-b125-880f1f9858c7/7184a7d2-72c6-48f2-b125-880f1f9858c7.vmdk or device None with type streamOptimized {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1023.251435] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6622c0b8-a936-4e52-8acd-108092e42c17 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.260859] env[62627]: DEBUG oslo_vmware.api [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the task: (returnval){ [ 1023.260859] env[62627]: value = "task-2194765" [ 1023.260859] env[62627]: _type = "Task" [ 1023.260859] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.279710] env[62627]: DEBUG oslo_vmware.api [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194765, 'name': Rename_Task} progress is 6%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.305264] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ae6b1cae-0174-4f2d-9eca-bbb146ac4472 tempest-MigrationsAdminTest-1384528618 tempest-MigrationsAdminTest-1384528618-project-member] Lock "ab999825-4310-4fd8-81f0-6bb5dde4175b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.885s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1023.432463] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194761, 'name': CreateVM_Task, 'duration_secs': 1.088113} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.432626] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bd583f6b-be8d-4f02-a67e-0ea10e6dd087] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1023.433332] env[62627]: DEBUG oslo_concurrency.lockutils [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1023.433483] env[62627]: DEBUG oslo_concurrency.lockutils [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1023.433807] env[62627]: DEBUG oslo_concurrency.lockutils [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1023.434174] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-829246dd-c598-4fdd-a731-4cd1e68d8ee4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.439921] env[62627]: DEBUG oslo_vmware.api [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Waiting for the task: (returnval){ [ 1023.439921] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5282c8a0-cd03-fc73-6ca4-e62d967baa5d" [ 1023.439921] env[62627]: _type = "Task" [ 1023.439921] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.448917] env[62627]: DEBUG oslo_vmware.api [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5282c8a0-cd03-fc73-6ca4-e62d967baa5d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.476338] env[62627]: DEBUG oslo_vmware.api [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194762, 'name': CloneVM_Task} progress is 94%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.477237] env[62627]: DEBUG oslo_concurrency.lockutils [None req-42a3e783-672b-4d5a-97e5-15db9d0f6ba5 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1023.477544] env[62627]: DEBUG oslo_concurrency.lockutils [None req-42a3e783-672b-4d5a-97e5-15db9d0f6ba5 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1023.477898] env[62627]: DEBUG nova.objects.instance [None req-42a3e783-672b-4d5a-97e5-15db9d0f6ba5 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Lazy-loading 'resources' on Instance uuid 7366e4b0-bdba-435d-844d-8e81c4464b31 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1023.610203] env[62627]: DEBUG oslo_vmware.api [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Task: {'id': task-2194764, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070869} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.610664] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] [instance: 58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1023.613021] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7859562b-a12b-413f-8829-a0ac41038d48 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.630992] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 77c60059-535a-47ca-a5b0-45cc1df93b45] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1023.646554] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] [instance: 58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37] Reconfiguring VM instance instance-00000046 to attach disk [datastore2] 58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37/58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1023.647151] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c6b2c699-7d96-47ea-a796-77fc7f26a1fc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.683992] env[62627]: DEBUG oslo_vmware.api [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Waiting for the task: (returnval){ [ 1023.683992] env[62627]: value = "task-2194766" [ 1023.683992] env[62627]: _type = "Task" [ 1023.683992] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.697877] env[62627]: DEBUG oslo_vmware.api [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Task: {'id': task-2194766, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.717560] env[62627]: DEBUG oslo_concurrency.lockutils [req-1724e486-cf85-4e15-beca-cf1efeeadd9c req-6e69176f-386d-4910-8ab6-85ca07466d8f service nova] Releasing lock "refresh_cache-bd583f6b-be8d-4f02-a67e-0ea10e6dd087" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1023.717849] env[62627]: DEBUG nova.compute.manager [req-1724e486-cf85-4e15-beca-cf1efeeadd9c req-6e69176f-386d-4910-8ab6-85ca07466d8f service nova] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Received event network-vif-deleted-81a799c8-db8c-4bae-8609-d4bf5b56c600 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1023.718502] env[62627]: INFO nova.compute.manager [req-1724e486-cf85-4e15-beca-cf1efeeadd9c req-6e69176f-386d-4910-8ab6-85ca07466d8f service nova] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Neutron deleted interface 81a799c8-db8c-4bae-8609-d4bf5b56c600; detaching it from the instance and deleting it from the info cache [ 1023.718502] env[62627]: DEBUG nova.network.neutron [req-1724e486-cf85-4e15-beca-cf1efeeadd9c req-6e69176f-386d-4910-8ab6-85ca07466d8f service nova] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1023.770235] env[62627]: DEBUG oslo_vmware.api [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194765, 'name': Rename_Task, 'duration_secs': 0.149873} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.770604] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 7184a7d2-72c6-48f2-b125-880f1f9858c7] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1023.770870] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8bdd9bc9-f6f1-44bf-b8b8-3ff5252aed8e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.779921] env[62627]: DEBUG oslo_vmware.api [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the task: (returnval){ [ 1023.779921] env[62627]: value = "task-2194767" [ 1023.779921] env[62627]: _type = "Task" [ 1023.779921] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.789061] env[62627]: DEBUG oslo_vmware.api [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194767, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.958786] env[62627]: DEBUG oslo_vmware.api [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5282c8a0-cd03-fc73-6ca4-e62d967baa5d, 'name': SearchDatastore_Task, 'duration_secs': 0.011289} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.960136] env[62627]: DEBUG oslo_concurrency.lockutils [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1023.960765] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: bd583f6b-be8d-4f02-a67e-0ea10e6dd087] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1023.961572] env[62627]: DEBUG oslo_concurrency.lockutils [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1023.961572] env[62627]: DEBUG oslo_concurrency.lockutils [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1023.961715] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1023.962462] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-73461dbb-c727-4c19-8783-79c77c9c3583 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.974621] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1023.975199] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 
tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1023.980920] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ff240f34-d93b-4bf3-919e-5f907e79fdb2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.988832] env[62627]: DEBUG oslo_vmware.api [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194762, 'name': CloneVM_Task} progress is 100%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.994634] env[62627]: DEBUG oslo_vmware.api [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Waiting for the task: (returnval){ [ 1023.994634] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52b59061-8498-8703-863a-7bce3e234a06" [ 1023.994634] env[62627]: _type = "Task" [ 1023.994634] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.008639] env[62627]: DEBUG oslo_vmware.api [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52b59061-8498-8703-863a-7bce3e234a06, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.148772] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 1298559d-14fb-422c-8206-a35a747f0c06] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1024.195018] env[62627]: DEBUG oslo_vmware.api [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Task: {'id': task-2194766, 'name': ReconfigVM_Task, 'duration_secs': 0.407875} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.197998] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] [instance: 58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37] Reconfigured VM instance instance-00000046 to attach disk [datastore2] 58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37/58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1024.198824] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-02f2a90e-f5a3-4395-9ab0-525eef46ab9a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.211130] env[62627]: DEBUG oslo_vmware.api [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Waiting for the task: (returnval){ [ 1024.211130] env[62627]: value = "task-2194768" [ 1024.211130] env[62627]: _type = "Task" [ 1024.211130] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.222480] env[62627]: DEBUG oslo_vmware.api [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Task: {'id': task-2194768, 'name': Rename_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.226184] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c8be82d0-4be9-415b-bf69-625070b61cc3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.237107] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e108a70-9293-44e6-b0cb-aedd48b5b581 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.279954] env[62627]: DEBUG nova.compute.manager [req-1724e486-cf85-4e15-beca-cf1efeeadd9c req-6e69176f-386d-4910-8ab6-85ca07466d8f service nova] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Detach interface failed, port_id=81a799c8-db8c-4bae-8609-d4bf5b56c600, reason: Instance 7366e4b0-bdba-435d-844d-8e81c4464b31 could not be found. {{(pid=62627) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1024.289691] env[62627]: DEBUG oslo_vmware.api [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194767, 'name': PowerOnVM_Task, 'duration_secs': 0.492537} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.292545] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 7184a7d2-72c6-48f2-b125-880f1f9858c7] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1024.293145] env[62627]: INFO nova.compute.manager [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 7184a7d2-72c6-48f2-b125-880f1f9858c7] Took 13.95 seconds to spawn the instance on the hypervisor. [ 1024.293145] env[62627]: DEBUG nova.compute.manager [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 7184a7d2-72c6-48f2-b125-880f1f9858c7] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1024.294024] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6fa773a-e490-4f73-a8ac-b2dac27a1ad7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.335499] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4b3a201-0ea3-4e7b-81d4-623e6dc25ae9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.344776] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e885361-073c-4e05-b731-935e4325c1fd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.380282] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c356dc3f-2bb7-4a43-8077-638f12b858f6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.388370] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bde868e3-6110-4b08-887c-69980e540426 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.405021] env[62627]: DEBUG nova.compute.provider_tree [None req-42a3e783-672b-4d5a-97e5-15db9d0f6ba5 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1024.480333] env[62627]: DEBUG oslo_vmware.api [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194762, 'name': CloneVM_Task, 'duration_secs': 1.555109} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.482097] env[62627]: INFO nova.virt.vmwareapi.vmops [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Created linked-clone VM from snapshot [ 1024.482097] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee5395e0-106a-41bf-8450-c8a03e02c994 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.493015] env[62627]: DEBUG nova.virt.vmwareapi.images [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Uploading image f54613ff-1549-4bc9-a5c2-54cc3ba07ce3 {{(pid=62627) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1024.508285] env[62627]: DEBUG oslo_vmware.api [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52b59061-8498-8703-863a-7bce3e234a06, 'name': SearchDatastore_Task, 'duration_secs': 0.010996} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.510246] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f459f443-222b-4d5b-a1b9-8e54aa215bb6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.520019] env[62627]: DEBUG oslo_vmware.api [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Waiting for the task: (returnval){ [ 1024.520019] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52f9ff1b-47bb-430e-cf9f-68a76150d08e" [ 1024.520019] env[62627]: _type = "Task" [ 1024.520019] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.526508] env[62627]: DEBUG oslo_vmware.rw_handles [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1024.526508] env[62627]: value = "vm-447736" [ 1024.526508] env[62627]: _type = "VirtualMachine" [ 1024.526508] env[62627]: }. {{(pid=62627) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1024.526866] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-a333eabc-43fb-4b1e-99ae-7700f2d28af2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.543090] env[62627]: DEBUG oslo_vmware.api [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52f9ff1b-47bb-430e-cf9f-68a76150d08e, 'name': SearchDatastore_Task, 'duration_secs': 0.01138} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.543523] env[62627]: DEBUG oslo_vmware.rw_handles [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lease: (returnval){ [ 1024.543523] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52c5fb71-55c5-7834-b1e1-ae522b1d6945" [ 1024.543523] env[62627]: _type = "HttpNfcLease" [ 1024.543523] env[62627]: } obtained for exporting VM: (result){ [ 1024.543523] env[62627]: value = "vm-447736" [ 1024.543523] env[62627]: _type = "VirtualMachine" [ 1024.543523] env[62627]: }. {{(pid=62627) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1024.544210] env[62627]: DEBUG oslo_vmware.api [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the lease: (returnval){ [ 1024.544210] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52c5fb71-55c5-7834-b1e1-ae522b1d6945" [ 1024.544210] env[62627]: _type = "HttpNfcLease" [ 1024.544210] env[62627]: } to be ready. {{(pid=62627) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1024.544210] env[62627]: DEBUG oslo_concurrency.lockutils [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1024.544838] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] bd583f6b-be8d-4f02-a67e-0ea10e6dd087/bd583f6b-be8d-4f02-a67e-0ea10e6dd087.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1024.545396] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-af1acb08-94be-4079-b12d-ec4a38ee1a87 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.557137] env[62627]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1024.557137] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52c5fb71-55c5-7834-b1e1-ae522b1d6945" [ 1024.557137] env[62627]: _type = "HttpNfcLease" [ 1024.557137] env[62627]: } is ready. {{(pid=62627) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1024.558723] env[62627]: DEBUG oslo_vmware.rw_handles [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1024.558723] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52c5fb71-55c5-7834-b1e1-ae522b1d6945" [ 1024.558723] env[62627]: _type = "HttpNfcLease" [ 1024.558723] env[62627]: }. 
{{(pid=62627) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1024.559405] env[62627]: DEBUG oslo_vmware.api [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Waiting for the task: (returnval){ [ 1024.559405] env[62627]: value = "task-2194770" [ 1024.559405] env[62627]: _type = "Task" [ 1024.559405] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.560186] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b25e59ee-b51a-4793-9cc5-2915c341a12c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.580270] env[62627]: DEBUG oslo_vmware.api [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194770, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.580608] env[62627]: DEBUG oslo_vmware.rw_handles [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520201d1-7bd2-27c1-c3c9-fb83295ba3fc/disk-0.vmdk from lease info. {{(pid=62627) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1024.580831] env[62627]: DEBUG oslo_vmware.rw_handles [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520201d1-7bd2-27c1-c3c9-fb83295ba3fc/disk-0.vmdk for reading. {{(pid=62627) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1024.654857] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: a674db15-ea4e-44c8-abc1-6af425286517] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1024.680027] env[62627]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-fcb1ab94-4e50-4f3a-8053-45f44d102fcf {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.723884] env[62627]: DEBUG oslo_vmware.api [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Task: {'id': task-2194768, 'name': Rename_Task, 'duration_secs': 0.168045} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.723884] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] [instance: 58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1024.723884] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2c15f735-4dcb-4a6d-94eb-c0d0afc33342 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.732469] env[62627]: DEBUG oslo_vmware.api [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Waiting for the task: (returnval){ [ 1024.732469] env[62627]: value = "task-2194771" [ 1024.732469] env[62627]: _type = "Task" [ 1024.732469] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.745417] env[62627]: DEBUG oslo_vmware.api [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Task: {'id': task-2194771, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.821257] env[62627]: INFO nova.compute.manager [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 7184a7d2-72c6-48f2-b125-880f1f9858c7] Took 28.20 seconds to build instance. [ 1024.909424] env[62627]: DEBUG nova.scheduler.client.report [None req-42a3e783-672b-4d5a-97e5-15db9d0f6ba5 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1025.075873] env[62627]: DEBUG oslo_vmware.api [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194770, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.490933} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.076242] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] bd583f6b-be8d-4f02-a67e-0ea10e6dd087/bd583f6b-be8d-4f02-a67e-0ea10e6dd087.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1025.076334] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: bd583f6b-be8d-4f02-a67e-0ea10e6dd087] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1025.076633] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-537fa27a-bd11-4e06-8914-8f690fefc9c0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.085186] env[62627]: DEBUG oslo_vmware.api [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Waiting for the task: (returnval){ [ 1025.085186] env[62627]: value = "task-2194772" [ 1025.085186] env[62627]: _type = "Task" [ 1025.085186] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.097768] env[62627]: DEBUG oslo_vmware.api [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194772, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.159818] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: f15982c3-23ed-4f25-bf55-3418cac117cc] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1025.245419] env[62627]: DEBUG oslo_vmware.api [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Task: {'id': task-2194771, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.323647] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0334a4c4-ab3d-40e5-a2ef-fde26e7c01f6 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Lock "7184a7d2-72c6-48f2-b125-880f1f9858c7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.714s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1025.419280] env[62627]: DEBUG oslo_concurrency.lockutils [None req-42a3e783-672b-4d5a-97e5-15db9d0f6ba5 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.942s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1025.449632] env[62627]: INFO nova.scheduler.client.report [None req-42a3e783-672b-4d5a-97e5-15db9d0f6ba5 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Deleted allocations for instance 7366e4b0-bdba-435d-844d-8e81c4464b31 [ 1025.597796] env[62627]: DEBUG oslo_vmware.api [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194772, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082245} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.598171] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: bd583f6b-be8d-4f02-a67e-0ea10e6dd087] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1025.599166] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a1ea6e3-4464-4bbe-86bf-e3a3ffc44cf7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.625073] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: bd583f6b-be8d-4f02-a67e-0ea10e6dd087] Reconfiguring VM instance instance-00000047 to attach disk [datastore2] bd583f6b-be8d-4f02-a67e-0ea10e6dd087/bd583f6b-be8d-4f02-a67e-0ea10e6dd087.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1025.626705] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-44c50e55-2ba6-40b0-bf35-385898984bae {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.658929] env[62627]: DEBUG oslo_vmware.api [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Waiting for the task: (returnval){ [ 1025.658929] env[62627]: value = "task-2194773" [ 1025.658929] env[62627]: _type = "Task" [ 1025.658929] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.663985] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 77418dff-6bdc-4e0a-954a-45c80960dbf1] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1025.685483] env[62627]: DEBUG oslo_vmware.api [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194773, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.747458] env[62627]: DEBUG oslo_vmware.api [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Task: {'id': task-2194771, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.958235] env[62627]: DEBUG oslo_concurrency.lockutils [None req-42a3e783-672b-4d5a-97e5-15db9d0f6ba5 tempest-ServersAdminTestJSON-158660873 tempest-ServersAdminTestJSON-158660873-project-member] Lock "7366e4b0-bdba-435d-844d-8e81c4464b31" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.093s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1026.150999] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ebc83ef4-9d7d-46cb-bc53-587dcd73092f tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Acquiring lock "7184a7d2-72c6-48f2-b125-880f1f9858c7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1026.151236] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ebc83ef4-9d7d-46cb-bc53-587dcd73092f tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Lock "7184a7d2-72c6-48f2-b125-880f1f9858c7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1026.151414] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ebc83ef4-9d7d-46cb-bc53-587dcd73092f tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Acquiring lock "7184a7d2-72c6-48f2-b125-880f1f9858c7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1026.151651] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ebc83ef4-9d7d-46cb-bc53-587dcd73092f tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Lock "7184a7d2-72c6-48f2-b125-880f1f9858c7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1026.151978] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ebc83ef4-9d7d-46cb-bc53-587dcd73092f tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] 
Lock "7184a7d2-72c6-48f2-b125-880f1f9858c7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1026.154883] env[62627]: INFO nova.compute.manager [None req-ebc83ef4-9d7d-46cb-bc53-587dcd73092f tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 7184a7d2-72c6-48f2-b125-880f1f9858c7] Terminating instance [ 1026.167448] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 5fbca4da-dbb9-4be7-84b1-ee816d7f1fb0] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1026.180384] env[62627]: DEBUG oslo_vmware.api [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194773, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.245628] env[62627]: DEBUG oslo_vmware.api [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Task: {'id': task-2194771, 'name': PowerOnVM_Task, 'duration_secs': 1.456999} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.246626] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] [instance: 58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1026.246967] env[62627]: INFO nova.compute.manager [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] [instance: 58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37] Took 11.56 seconds to spawn the instance on the hypervisor. [ 1026.247264] env[62627]: DEBUG nova.compute.manager [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] [instance: 58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1026.248514] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0a11fdc-7c9c-4dc1-8cea-0bc938d7cd7f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.660045] env[62627]: DEBUG nova.compute.manager [None req-ebc83ef4-9d7d-46cb-bc53-587dcd73092f tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 7184a7d2-72c6-48f2-b125-880f1f9858c7] Start destroying the instance on the hypervisor. 
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1026.660289] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ebc83ef4-9d7d-46cb-bc53-587dcd73092f tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 7184a7d2-72c6-48f2-b125-880f1f9858c7] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1026.661237] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46b9ba76-d2c6-4bdf-8ec9-46018ebd27c5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.672872] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1026.672872] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Cleaning up deleted instances with incomplete migration {{(pid=62627) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11783}} [ 1026.683104] env[62627]: DEBUG oslo_vmware.api [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194773, 'name': ReconfigVM_Task, 'duration_secs': 0.527904} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.685822] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: bd583f6b-be8d-4f02-a67e-0ea10e6dd087] Reconfigured VM instance instance-00000047 to attach disk [datastore2] bd583f6b-be8d-4f02-a67e-0ea10e6dd087/bd583f6b-be8d-4f02-a67e-0ea10e6dd087.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1026.686535] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebc83ef4-9d7d-46cb-bc53-587dcd73092f tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 7184a7d2-72c6-48f2-b125-880f1f9858c7] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1026.686767] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-26974b4a-0210-4521-8323-8e9157d06662 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.688564] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-525a2d03-17ad-48a8-89d7-bc6890edee5b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.697512] env[62627]: DEBUG oslo_vmware.api [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Waiting for the task: (returnval){ [ 1026.697512] env[62627]: value = "task-2194774" [ 1026.697512] env[62627]: _type = "Task" [ 1026.697512] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.700351] env[62627]: DEBUG oslo_vmware.api [None req-ebc83ef4-9d7d-46cb-bc53-587dcd73092f tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the task: (returnval){ [ 1026.700351] env[62627]: value = "task-2194775" [ 1026.700351] env[62627]: _type = "Task" [ 1026.700351] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.713931] env[62627]: DEBUG oslo_vmware.api [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194774, 'name': Rename_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.717762] env[62627]: DEBUG oslo_vmware.api [None req-ebc83ef4-9d7d-46cb-bc53-587dcd73092f tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194775, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.770187] env[62627]: INFO nova.compute.manager [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] [instance: 58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37] Took 25.40 seconds to build instance. [ 1027.175308] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1027.219274] env[62627]: DEBUG oslo_vmware.api [None req-ebc83ef4-9d7d-46cb-bc53-587dcd73092f tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194775, 'name': PowerOffVM_Task, 'duration_secs': 0.224548} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.223304] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebc83ef4-9d7d-46cb-bc53-587dcd73092f tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 7184a7d2-72c6-48f2-b125-880f1f9858c7] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1027.223304] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ebc83ef4-9d7d-46cb-bc53-587dcd73092f tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 7184a7d2-72c6-48f2-b125-880f1f9858c7] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1027.223304] env[62627]: DEBUG oslo_vmware.api [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194774, 'name': Rename_Task, 'duration_secs': 0.177985} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.223459] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-44f10597-5722-4316-a3fa-8e5b074de80b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.225842] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: bd583f6b-be8d-4f02-a67e-0ea10e6dd087] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1027.226424] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d1cf8ff7-63be-4128-9feb-297940ee9546 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.236764] env[62627]: DEBUG oslo_vmware.api [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Waiting for the task: (returnval){ [ 1027.236764] env[62627]: value = "task-2194776" [ 1027.236764] env[62627]: _type = "Task" [ 1027.236764] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.246888] env[62627]: DEBUG oslo_vmware.api [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194776, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.273675] env[62627]: DEBUG oslo_concurrency.lockutils [None req-115ac835-10e3-4f17-b8a1-8e6d27a00a51 tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Lock "58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.910s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1027.345530] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ebc83ef4-9d7d-46cb-bc53-587dcd73092f tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 7184a7d2-72c6-48f2-b125-880f1f9858c7] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1027.345530] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ebc83ef4-9d7d-46cb-bc53-587dcd73092f tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 7184a7d2-72c6-48f2-b125-880f1f9858c7] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1027.345530] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-ebc83ef4-9d7d-46cb-bc53-587dcd73092f tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Deleting the datastore file [datastore2] 7184a7d2-72c6-48f2-b125-880f1f9858c7 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1027.345530] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-afe8938e-a21d-4998-a60a-f8d0be97e617 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.354700] env[62627]: DEBUG oslo_vmware.api [None req-ebc83ef4-9d7d-46cb-bc53-587dcd73092f tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the task: (returnval){ [ 1027.354700] env[62627]: value = "task-2194778" [ 1027.354700] env[62627]: _type = "Task" [ 1027.354700] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.367787] env[62627]: DEBUG oslo_vmware.api [None req-ebc83ef4-9d7d-46cb-bc53-587dcd73092f tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194778, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.748559] env[62627]: DEBUG oslo_vmware.api [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194776, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.866821] env[62627]: DEBUG oslo_vmware.api [None req-ebc83ef4-9d7d-46cb-bc53-587dcd73092f tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194778, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.234302} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.867357] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-ebc83ef4-9d7d-46cb-bc53-587dcd73092f tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1027.867627] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ebc83ef4-9d7d-46cb-bc53-587dcd73092f tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 7184a7d2-72c6-48f2-b125-880f1f9858c7] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1027.868037] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ebc83ef4-9d7d-46cb-bc53-587dcd73092f tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 7184a7d2-72c6-48f2-b125-880f1f9858c7] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1027.868291] env[62627]: INFO nova.compute.manager [None req-ebc83ef4-9d7d-46cb-bc53-587dcd73092f tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 7184a7d2-72c6-48f2-b125-880f1f9858c7] Took 1.21 seconds to destroy the instance on the hypervisor. [ 1027.868750] env[62627]: DEBUG oslo.service.loopingcall [None req-ebc83ef4-9d7d-46cb-bc53-587dcd73092f tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1027.869250] env[62627]: DEBUG nova.compute.manager [-] [instance: 7184a7d2-72c6-48f2-b125-880f1f9858c7] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1027.869450] env[62627]: DEBUG nova.network.neutron [-] [instance: 7184a7d2-72c6-48f2-b125-880f1f9858c7] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1028.251200] env[62627]: DEBUG oslo_vmware.api [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194776, 'name': PowerOnVM_Task, 'duration_secs': 0.578189} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.251452] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: bd583f6b-be8d-4f02-a67e-0ea10e6dd087] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1028.251662] env[62627]: INFO nova.compute.manager [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: bd583f6b-be8d-4f02-a67e-0ea10e6dd087] Took 9.24 seconds to spawn the instance on the hypervisor. [ 1028.251841] env[62627]: DEBUG nova.compute.manager [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: bd583f6b-be8d-4f02-a67e-0ea10e6dd087] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1028.252731] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abe2cd85-ea82-47f8-a8c9-b4db57021b90 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.610294] env[62627]: DEBUG nova.compute.manager [req-2f4e2368-ba82-41de-ae8e-7c508ee19572 req-e70c68f4-cb5f-4205-b043-0ad46b4f774f service nova] [instance: 7184a7d2-72c6-48f2-b125-880f1f9858c7] Received event network-vif-deleted-28c58084-5681-433c-a2ce-043547a7f315 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1028.610294] env[62627]: INFO nova.compute.manager [req-2f4e2368-ba82-41de-ae8e-7c508ee19572 req-e70c68f4-cb5f-4205-b043-0ad46b4f774f service nova] [instance: 7184a7d2-72c6-48f2-b125-880f1f9858c7] Neutron deleted interface 28c58084-5681-433c-a2ce-043547a7f315; detaching it from the instance and deleting it from the info cache [ 1028.610294] env[62627]: DEBUG nova.network.neutron [req-2f4e2368-ba82-41de-ae8e-7c508ee19572 req-e70c68f4-cb5f-4205-b043-0ad46b4f774f service nova] [instance: 7184a7d2-72c6-48f2-b125-880f1f9858c7] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1028.780939] env[62627]: INFO nova.compute.manager [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] 
[instance: bd583f6b-be8d-4f02-a67e-0ea10e6dd087] Took 18.14 seconds to build instance. [ 1028.844939] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7fcb77fa-f30d-4201-a41e-5a196fddd83d tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Acquiring lock "58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1028.844939] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7fcb77fa-f30d-4201-a41e-5a196fddd83d tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Lock "58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1028.844939] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7fcb77fa-f30d-4201-a41e-5a196fddd83d tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Acquiring lock "58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1028.844939] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7fcb77fa-f30d-4201-a41e-5a196fddd83d tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Lock "58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1028.844939] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7fcb77fa-f30d-4201-a41e-5a196fddd83d tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Lock "58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1028.845910] env[62627]: INFO nova.compute.manager [None req-7fcb77fa-f30d-4201-a41e-5a196fddd83d tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] [instance: 58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37] Terminating instance [ 1029.089667] env[62627]: DEBUG nova.network.neutron [-] [instance: 7184a7d2-72c6-48f2-b125-880f1f9858c7] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1029.113699] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-95056e49-a8f6-44e6-a513-a276731a20fe {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.129637] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02c10014-dd4b-45fc-9ebe-253e802bf51d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.176165] env[62627]: DEBUG nova.compute.manager [req-2f4e2368-ba82-41de-ae8e-7c508ee19572 
req-e70c68f4-cb5f-4205-b043-0ad46b4f774f service nova] [instance: 7184a7d2-72c6-48f2-b125-880f1f9858c7] Detach interface failed, port_id=28c58084-5681-433c-a2ce-043547a7f315, reason: Instance 7184a7d2-72c6-48f2-b125-880f1f9858c7 could not be found. {{(pid=62627) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1029.250840] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Acquiring lock "16ac82e3-b9e3-4c34-bb8c-2989cc33246f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1029.250840] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Lock "16ac82e3-b9e3-4c34-bb8c-2989cc33246f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1029.282682] env[62627]: DEBUG oslo_concurrency.lockutils [None req-db816bba-d86a-4648-8f18-747e9414dee5 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Lock "bd583f6b-be8d-4f02-a67e-0ea10e6dd087" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.652s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1029.350238] env[62627]: DEBUG nova.compute.manager [None req-7fcb77fa-f30d-4201-a41e-5a196fddd83d tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] [instance: 58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37] Start destroying the instance on the hypervisor. 
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1029.350837] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7fcb77fa-f30d-4201-a41e-5a196fddd83d tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] [instance: 58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1029.351617] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f077fe87-1823-42f6-a0a2-e59e0dea5ed7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.363798] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fcb77fa-f30d-4201-a41e-5a196fddd83d tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] [instance: 58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1029.364126] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-91d9f110-4b3d-4af0-93f1-1d7d56425c5d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.372580] env[62627]: DEBUG oslo_vmware.api [None req-7fcb77fa-f30d-4201-a41e-5a196fddd83d tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Waiting for the task: (returnval){ [ 1029.372580] env[62627]: value = "task-2194779" [ 1029.372580] env[62627]: _type = "Task" [ 1029.372580] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.382236] env[62627]: DEBUG oslo_vmware.api [None req-7fcb77fa-f30d-4201-a41e-5a196fddd83d tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Task: {'id': task-2194779, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.595403] env[62627]: INFO nova.compute.manager [-] [instance: 7184a7d2-72c6-48f2-b125-880f1f9858c7] Took 1.73 seconds to deallocate network for instance. [ 1029.755216] env[62627]: DEBUG nova.compute.manager [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] [instance: 16ac82e3-b9e3-4c34-bb8c-2989cc33246f] Starting instance... 
{{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1029.820408] env[62627]: DEBUG oslo_concurrency.lockutils [None req-80003f26-d1e9-4774-bc0c-64bc7f5e2a38 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Acquiring lock "bd583f6b-be8d-4f02-a67e-0ea10e6dd087" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1029.820408] env[62627]: DEBUG oslo_concurrency.lockutils [None req-80003f26-d1e9-4774-bc0c-64bc7f5e2a38 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Lock "bd583f6b-be8d-4f02-a67e-0ea10e6dd087" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1029.820408] env[62627]: DEBUG oslo_concurrency.lockutils [None req-80003f26-d1e9-4774-bc0c-64bc7f5e2a38 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Acquiring lock "bd583f6b-be8d-4f02-a67e-0ea10e6dd087-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1029.820924] env[62627]: DEBUG oslo_concurrency.lockutils [None req-80003f26-d1e9-4774-bc0c-64bc7f5e2a38 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Lock "bd583f6b-be8d-4f02-a67e-0ea10e6dd087-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1029.820924] env[62627]: DEBUG oslo_concurrency.lockutils [None req-80003f26-d1e9-4774-bc0c-64bc7f5e2a38 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Lock "bd583f6b-be8d-4f02-a67e-0ea10e6dd087-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1029.824488] env[62627]: INFO nova.compute.manager [None req-80003f26-d1e9-4774-bc0c-64bc7f5e2a38 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: bd583f6b-be8d-4f02-a67e-0ea10e6dd087] Terminating instance [ 1029.883546] env[62627]: DEBUG oslo_vmware.api [None req-7fcb77fa-f30d-4201-a41e-5a196fddd83d tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Task: {'id': task-2194779, 'name': PowerOffVM_Task, 'duration_secs': 0.256974} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.884047] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fcb77fa-f30d-4201-a41e-5a196fddd83d tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] [instance: 58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1029.884154] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7fcb77fa-f30d-4201-a41e-5a196fddd83d tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] [instance: 58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1029.884464] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e663579d-a7ff-48d5-95ab-8d80ae1be5c1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.949717] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7fcb77fa-f30d-4201-a41e-5a196fddd83d tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] [instance: 58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1029.950646] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7fcb77fa-f30d-4201-a41e-5a196fddd83d tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] [instance: 58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1029.950987] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fcb77fa-f30d-4201-a41e-5a196fddd83d tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Deleting the datastore file [datastore2] 58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1029.951801] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-611da53b-6244-48f4-af8b-e9decfe45080 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.959791] env[62627]: DEBUG oslo_vmware.api [None req-7fcb77fa-f30d-4201-a41e-5a196fddd83d tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Waiting for the task: (returnval){ [ 1029.959791] env[62627]: value = "task-2194781" [ 1029.959791] env[62627]: _type = "Task" [ 1029.959791] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.969806] env[62627]: DEBUG oslo_vmware.api [None req-7fcb77fa-f30d-4201-a41e-5a196fddd83d tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Task: {'id': task-2194781, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.107018] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ebc83ef4-9d7d-46cb-bc53-587dcd73092f tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1030.107018] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ebc83ef4-9d7d-46cb-bc53-587dcd73092f tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1030.107018] env[62627]: DEBUG nova.objects.instance [None req-ebc83ef4-9d7d-46cb-bc53-587dcd73092f tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Lazy-loading 'resources' on Instance uuid 7184a7d2-72c6-48f2-b125-880f1f9858c7 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1030.287642] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1030.328653] env[62627]: DEBUG nova.compute.manager [None req-80003f26-d1e9-4774-bc0c-64bc7f5e2a38 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: bd583f6b-be8d-4f02-a67e-0ea10e6dd087] Start destroying the instance on the hypervisor. 
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1030.328996] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-80003f26-d1e9-4774-bc0c-64bc7f5e2a38 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: bd583f6b-be8d-4f02-a67e-0ea10e6dd087] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1030.334223] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3ee487c-d515-4f36-bf01-5c6774b322c0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.340712] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-80003f26-d1e9-4774-bc0c-64bc7f5e2a38 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: bd583f6b-be8d-4f02-a67e-0ea10e6dd087] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1030.341172] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-913f692c-4cd7-4ff5-a4e4-bd9a493e534a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.349668] env[62627]: DEBUG oslo_vmware.api [None req-80003f26-d1e9-4774-bc0c-64bc7f5e2a38 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Waiting for the task: (returnval){ [ 1030.349668] env[62627]: value = "task-2194782" [ 1030.349668] env[62627]: _type = "Task" [ 1030.349668] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.359492] env[62627]: DEBUG oslo_vmware.api [None req-80003f26-d1e9-4774-bc0c-64bc7f5e2a38 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194782, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.471412] env[62627]: DEBUG oslo_vmware.api [None req-7fcb77fa-f30d-4201-a41e-5a196fddd83d tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Task: {'id': task-2194781, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.283662} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.471751] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fcb77fa-f30d-4201-a41e-5a196fddd83d tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1030.472011] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7fcb77fa-f30d-4201-a41e-5a196fddd83d tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] [instance: 58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1030.472280] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7fcb77fa-f30d-4201-a41e-5a196fddd83d tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] [instance: 58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1030.472524] env[62627]: INFO nova.compute.manager [None req-7fcb77fa-f30d-4201-a41e-5a196fddd83d tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] [instance: 58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1030.472834] env[62627]: DEBUG oslo.service.loopingcall [None req-7fcb77fa-f30d-4201-a41e-5a196fddd83d tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1030.473080] env[62627]: DEBUG nova.compute.manager [-] [instance: 58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1030.473868] env[62627]: DEBUG nova.network.neutron [-] [instance: 58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1030.866619] env[62627]: DEBUG oslo_vmware.api [None req-80003f26-d1e9-4774-bc0c-64bc7f5e2a38 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194782, 'name': PowerOffVM_Task, 'duration_secs': 0.279439} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.868295] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-80003f26-d1e9-4774-bc0c-64bc7f5e2a38 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: bd583f6b-be8d-4f02-a67e-0ea10e6dd087] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1030.868550] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-80003f26-d1e9-4774-bc0c-64bc7f5e2a38 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: bd583f6b-be8d-4f02-a67e-0ea10e6dd087] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1030.869160] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b91580b3-21e5-4af1-8a4b-b26df6a91816 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.892765] env[62627]: DEBUG nova.compute.manager [req-75527395-3b31-4423-be1b-e309d3edb0ed req-b723396d-a066-4092-93ba-4e379b4056e7 service nova] [instance: 58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37] Received event network-vif-deleted-86cb7370-5805-448b-bd4b-6204a2aa2a76 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1030.892765] env[62627]: INFO nova.compute.manager [req-75527395-3b31-4423-be1b-e309d3edb0ed req-b723396d-a066-4092-93ba-4e379b4056e7 service nova] [instance: 58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37] Neutron deleted interface 86cb7370-5805-448b-bd4b-6204a2aa2a76; detaching it from the instance and deleting it from the info cache [ 1030.893066] env[62627]: DEBUG nova.network.neutron [req-75527395-3b31-4423-be1b-e309d3edb0ed req-b723396d-a066-4092-93ba-4e379b4056e7 service nova] [instance: 58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1030.925116] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5ad9bb0-c8c1-49f9-a910-407dac925620 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.936064] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c3b6dab-978b-43fb-8089-d1465865934b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.943224] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-80003f26-d1e9-4774-bc0c-64bc7f5e2a38 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: bd583f6b-be8d-4f02-a67e-0ea10e6dd087] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1030.943716] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-80003f26-d1e9-4774-bc0c-64bc7f5e2a38 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: bd583f6b-be8d-4f02-a67e-0ea10e6dd087] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1030.944058] env[62627]: DEBUG nova.virt.vmwareapi.ds_util 
[None req-80003f26-d1e9-4774-bc0c-64bc7f5e2a38 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Deleting the datastore file [datastore2] bd583f6b-be8d-4f02-a67e-0ea10e6dd087 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1030.945205] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-901b5ba7-a0ae-48f3-90b8-264c6e10bd8f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.978276] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Acquiring lock "f8003eee-248c-4005-8465-6e3e96f98f88" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1030.978519] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Lock "f8003eee-248c-4005-8465-6e3e96f98f88" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1030.980165] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e1c130f-f126-4a8e-80af-d8c74c8bdcf4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.985038] env[62627]: DEBUG oslo_vmware.api [None req-80003f26-d1e9-4774-bc0c-64bc7f5e2a38 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Waiting for the task: (returnval){ [ 1030.985038] env[62627]: value = "task-2194784" [ 1030.985038] env[62627]: _type = "Task" [ 1030.985038] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.993287] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d6f02fa-14d2-43bf-8d7d-0ec5f90aa93a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.001443] env[62627]: DEBUG oslo_vmware.api [None req-80003f26-d1e9-4774-bc0c-64bc7f5e2a38 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194784, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.012700] env[62627]: DEBUG nova.compute.provider_tree [None req-ebc83ef4-9d7d-46cb-bc53-587dcd73092f tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1031.370394] env[62627]: DEBUG nova.network.neutron [-] [instance: 58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1031.399013] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-eb99328a-70c2-4a9f-8f3c-52960f139ac9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.409463] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6466dd8a-4226-415f-98ce-48de847822de {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.443511] env[62627]: DEBUG nova.compute.manager [req-75527395-3b31-4423-be1b-e309d3edb0ed req-b723396d-a066-4092-93ba-4e379b4056e7 service nova] [instance: 58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37] Detach interface failed, port_id=86cb7370-5805-448b-bd4b-6204a2aa2a76, reason: Instance 58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37 could not be found. {{(pid=62627) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1031.485072] env[62627]: DEBUG nova.compute.manager [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] [instance: f8003eee-248c-4005-8465-6e3e96f98f88] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1031.497568] env[62627]: DEBUG oslo_vmware.api [None req-80003f26-d1e9-4774-bc0c-64bc7f5e2a38 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Task: {'id': task-2194784, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.175222} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.497840] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-80003f26-d1e9-4774-bc0c-64bc7f5e2a38 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1031.498069] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-80003f26-d1e9-4774-bc0c-64bc7f5e2a38 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: bd583f6b-be8d-4f02-a67e-0ea10e6dd087] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1031.498305] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-80003f26-d1e9-4774-bc0c-64bc7f5e2a38 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: bd583f6b-be8d-4f02-a67e-0ea10e6dd087] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1031.498598] env[62627]: INFO nova.compute.manager [None req-80003f26-d1e9-4774-bc0c-64bc7f5e2a38 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] [instance: bd583f6b-be8d-4f02-a67e-0ea10e6dd087] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1031.498696] env[62627]: DEBUG oslo.service.loopingcall [None req-80003f26-d1e9-4774-bc0c-64bc7f5e2a38 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1031.498875] env[62627]: DEBUG nova.compute.manager [-] [instance: bd583f6b-be8d-4f02-a67e-0ea10e6dd087] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1031.498967] env[62627]: DEBUG nova.network.neutron [-] [instance: bd583f6b-be8d-4f02-a67e-0ea10e6dd087] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1031.516344] env[62627]: DEBUG nova.scheduler.client.report [None req-ebc83ef4-9d7d-46cb-bc53-587dcd73092f tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1031.873464] env[62627]: INFO nova.compute.manager [-] [instance: 58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37] Took 1.40 seconds to deallocate network for instance. 
[ 1032.013887] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1032.022330] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ebc83ef4-9d7d-46cb-bc53-587dcd73092f tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.917s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1032.024593] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.737s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1032.026253] env[62627]: INFO nova.compute.claims [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] [instance: 16ac82e3-b9e3-4c34-bb8c-2989cc33246f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1032.054851] env[62627]: INFO nova.scheduler.client.report [None req-ebc83ef4-9d7d-46cb-bc53-587dcd73092f tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Deleted allocations for instance 7184a7d2-72c6-48f2-b125-880f1f9858c7 [ 1032.223269] env[62627]: DEBUG nova.network.neutron [-] [instance: bd583f6b-be8d-4f02-a67e-0ea10e6dd087] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1032.380572] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7fcb77fa-f30d-4201-a41e-5a196fddd83d tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1032.564055] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ebc83ef4-9d7d-46cb-bc53-587dcd73092f tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Lock "7184a7d2-72c6-48f2-b125-880f1f9858c7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.413s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1032.726631] env[62627]: INFO nova.compute.manager [-] [instance: bd583f6b-be8d-4f02-a67e-0ea10e6dd087] Took 1.23 seconds to deallocate network for instance. [ 1032.967078] env[62627]: DEBUG oslo_vmware.rw_handles [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520201d1-7bd2-27c1-c3c9-fb83295ba3fc/disk-0.vmdk. 
{{(pid=62627) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1032.968567] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-632477fa-007e-4fd8-8fbc-b274c9ea1eca {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.977071] env[62627]: DEBUG oslo_vmware.rw_handles [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520201d1-7bd2-27c1-c3c9-fb83295ba3fc/disk-0.vmdk is in state: ready. {{(pid=62627) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1032.977347] env[62627]: ERROR oslo_vmware.rw_handles [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520201d1-7bd2-27c1-c3c9-fb83295ba3fc/disk-0.vmdk due to incomplete transfer. [ 1032.977554] env[62627]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-860082f3-5364-4440-a23b-045398f48177 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.988555] env[62627]: DEBUG oslo_vmware.rw_handles [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520201d1-7bd2-27c1-c3c9-fb83295ba3fc/disk-0.vmdk. {{(pid=62627) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1032.988757] env[62627]: DEBUG nova.virt.vmwareapi.images [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Uploaded image f54613ff-1549-4bc9-a5c2-54cc3ba07ce3 to the Glance image server {{(pid=62627) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1032.993628] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Destroying the VM {{(pid=62627) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1032.993948] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-e5aec8f8-1eac-492f-a4eb-bdc98d6d7080 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.003969] env[62627]: DEBUG oslo_vmware.api [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 1033.003969] env[62627]: value = "task-2194785" [ 1033.003969] env[62627]: _type = "Task" [ 1033.003969] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.014675] env[62627]: DEBUG oslo_vmware.api [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194785, 'name': Destroy_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.018398] env[62627]: DEBUG nova.compute.manager [req-5d662928-32da-4f7c-b5bb-c3a41bbb2116 req-9e79f61f-3f65-42b1-8f75-b8633c3aa54a service nova] [instance: bd583f6b-be8d-4f02-a67e-0ea10e6dd087] Received event network-vif-deleted-1dbb6dda-f003-4949-8bd8-ae4bd148a135 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1033.124699] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0a3604f4-6f17-4d5c-bc87-997035436c17 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Acquiring lock "e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1033.124940] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0a3604f4-6f17-4d5c-bc87-997035436c17 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Lock "e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1033.125232] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0a3604f4-6f17-4d5c-bc87-997035436c17 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Acquiring lock "e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1033.125421] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0a3604f4-6f17-4d5c-bc87-997035436c17 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Lock "e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1033.125628] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0a3604f4-6f17-4d5c-bc87-997035436c17 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Lock "e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1033.127716] env[62627]: INFO nova.compute.manager [None req-0a3604f4-6f17-4d5c-bc87-997035436c17 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] Terminating instance [ 1033.233950] env[62627]: DEBUG oslo_concurrency.lockutils [None req-80003f26-d1e9-4774-bc0c-64bc7f5e2a38 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Acquiring lock 
"compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1033.274635] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-122f586a-c7dd-4ecd-b82b-7f3cb3bbd22d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.282933] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8f2c542-9dda-4e71-9e51-0c9ac1afd85c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.316429] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec708f0d-d2b5-408c-b2b8-ba77d10aa0d1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.324818] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f60383a-6929-41f9-b84d-fd706d198f53 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.341728] env[62627]: DEBUG nova.compute.provider_tree [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1033.514461] env[62627]: DEBUG oslo_vmware.api [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194785, 'name': Destroy_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.635573] env[62627]: DEBUG nova.compute.manager [None req-0a3604f4-6f17-4d5c-bc87-997035436c17 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] Start destroying the instance on the hypervisor. 
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1033.635573] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-0a3604f4-6f17-4d5c-bc87-997035436c17 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1033.635573] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2adfa52-1670-4595-9b84-04bf82ccbae6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.641032] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a3604f4-6f17-4d5c-bc87-997035436c17 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1033.641258] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-04cb6feb-1b6c-4010-afa5-5487fe6c1620 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.650154] env[62627]: DEBUG oslo_concurrency.lockutils [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Acquiring lock "e263159d-e9ff-4a19-a29a-cb06eaf2d679" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1033.650718] env[62627]: DEBUG oslo_concurrency.lockutils [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Lock "e263159d-e9ff-4a19-a29a-cb06eaf2d679" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1033.651508] env[62627]: DEBUG oslo_vmware.api [None req-0a3604f4-6f17-4d5c-bc87-997035436c17 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the task: (returnval){ [ 1033.651508] env[62627]: value = "task-2194786" [ 1033.651508] env[62627]: _type = "Task" [ 1033.651508] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.663950] env[62627]: DEBUG oslo_vmware.api [None req-0a3604f4-6f17-4d5c-bc87-997035436c17 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194786, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.845534] env[62627]: DEBUG nova.scheduler.client.report [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1034.018138] env[62627]: DEBUG oslo_vmware.api [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194785, 'name': Destroy_Task, 'duration_secs': 0.750012} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.018445] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Destroyed the VM [ 1034.018647] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Deleting Snapshot of the VM instance {{(pid=62627) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1034.018904] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-349b90b5-e850-4d0a-bd77-1842ba6a4ecb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.031032] env[62627]: DEBUG oslo_vmware.api [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 1034.031032] env[62627]: value = "task-2194787" [ 1034.031032] env[62627]: _type = "Task" [ 1034.031032] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.038467] env[62627]: DEBUG oslo_vmware.api [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194787, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.153046] env[62627]: DEBUG nova.compute.manager [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e263159d-e9ff-4a19-a29a-cb06eaf2d679] Starting instance... 
{{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1034.168349] env[62627]: DEBUG oslo_vmware.api [None req-0a3604f4-6f17-4d5c-bc87-997035436c17 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194786, 'name': PowerOffVM_Task, 'duration_secs': 0.206316} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.168633] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a3604f4-6f17-4d5c-bc87-997035436c17 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1034.168808] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-0a3604f4-6f17-4d5c-bc87-997035436c17 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1034.169562] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-be9fd4ea-7cea-4736-93d0-9ae5c9b33a3c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.239509] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-0a3604f4-6f17-4d5c-bc87-997035436c17 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1034.239509] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-0a3604f4-6f17-4d5c-bc87-997035436c17 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1034.239509] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a3604f4-6f17-4d5c-bc87-997035436c17 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Deleting the datastore file [datastore2] e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1034.239784] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e8d670bd-432e-4bfd-8003-e932e524c85e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.247472] env[62627]: DEBUG oslo_vmware.api [None req-0a3604f4-6f17-4d5c-bc87-997035436c17 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the task: (returnval){ [ 1034.247472] env[62627]: value = "task-2194789" [ 1034.247472] env[62627]: _type = "Task" [ 1034.247472] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.258467] env[62627]: DEBUG oslo_vmware.api [None req-0a3604f4-6f17-4d5c-bc87-997035436c17 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194789, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.351673] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.327s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1034.352152] env[62627]: DEBUG nova.compute.manager [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] [instance: 16ac82e3-b9e3-4c34-bb8c-2989cc33246f] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1034.357156] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.341s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1034.357156] env[62627]: INFO nova.compute.claims [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] [instance: f8003eee-248c-4005-8465-6e3e96f98f88] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1034.542047] env[62627]: DEBUG oslo_vmware.api [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194787, 'name': RemoveSnapshot_Task, 'duration_secs': 0.362089} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.542669] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Deleted Snapshot of the VM instance {{(pid=62627) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1034.543012] env[62627]: DEBUG nova.compute.manager [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1034.543790] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53a66864-bd7b-444c-a4c0-4469f7b6bb45 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.682410] env[62627]: DEBUG oslo_concurrency.lockutils [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1034.759314] env[62627]: DEBUG oslo_vmware.api [None req-0a3604f4-6f17-4d5c-bc87-997035436c17 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194789, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.13256} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.760114] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a3604f4-6f17-4d5c-bc87-997035436c17 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1034.760114] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-0a3604f4-6f17-4d5c-bc87-997035436c17 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1034.760114] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-0a3604f4-6f17-4d5c-bc87-997035436c17 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1034.760286] env[62627]: INFO nova.compute.manager [None req-0a3604f4-6f17-4d5c-bc87-997035436c17 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1034.760405] env[62627]: DEBUG oslo.service.loopingcall [None req-0a3604f4-6f17-4d5c-bc87-997035436c17 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1034.760601] env[62627]: DEBUG nova.compute.manager [-] [instance: e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1034.760695] env[62627]: DEBUG nova.network.neutron [-] [instance: e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1034.861458] env[62627]: DEBUG nova.compute.utils [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1034.864605] env[62627]: DEBUG nova.compute.manager [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] [instance: 16ac82e3-b9e3-4c34-bb8c-2989cc33246f] Allocating IP information in the background. {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1034.864781] env[62627]: DEBUG nova.network.neutron [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] [instance: 16ac82e3-b9e3-4c34-bb8c-2989cc33246f] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1034.962387] env[62627]: DEBUG nova.policy [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cdf119950a634bc29d4158e7a3dabcd8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '83faa1f468534985b7de71482956559b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 1035.058083] env[62627]: INFO nova.compute.manager [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Shelve offloading [ 1035.150203] env[62627]: DEBUG nova.compute.manager [req-0ab4db2b-1f73-4b84-be8d-baefb11fe92b req-d683e65a-541f-4774-8abb-5c66d17ea378 service nova] [instance: e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] Received event network-vif-deleted-fcb37279-335e-48f7-8d54-32ecee8fa761 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1035.150203] env[62627]: INFO nova.compute.manager [req-0ab4db2b-1f73-4b84-be8d-baefb11fe92b req-d683e65a-541f-4774-8abb-5c66d17ea378 service nova] [instance: e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] Neutron deleted interface fcb37279-335e-48f7-8d54-32ecee8fa761; detaching it from the instance and deleting it from the info cache [ 1035.151099] env[62627]: DEBUG nova.network.neutron [req-0ab4db2b-1f73-4b84-be8d-baefb11fe92b req-d683e65a-541f-4774-8abb-5c66d17ea378 service nova] [instance: e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] Updating instance_info_cache with network_info: [] {{(pid=62627) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1035.371058] env[62627]: DEBUG nova.compute.manager [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] [instance: 16ac82e3-b9e3-4c34-bb8c-2989cc33246f] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1035.391978] env[62627]: DEBUG nova.network.neutron [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] [instance: 16ac82e3-b9e3-4c34-bb8c-2989cc33246f] Successfully created port: 3597f9e3-4248-4b24-b7ca-bce13900128a {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1035.542086] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Acquiring lock "513c755a-424a-4e47-887e-7b315ad2e583" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1035.542086] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Lock "513c755a-424a-4e47-887e-7b315ad2e583" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1035.562379] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1035.562516] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-46626581-2c46-4101-96c1-99ffadab5d01 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.574648] env[62627]: DEBUG oslo_vmware.api [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 1035.574648] env[62627]: value = "task-2194790" [ 1035.574648] env[62627]: _type = "Task" [ 1035.574648] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.586468] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] VM already powered off {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1035.586703] env[62627]: DEBUG nova.compute.manager [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1035.587472] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63b64545-f26a-4a28-a210-cc4dc8a18af6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.593734] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquiring lock "refresh_cache-07b08283-cbe5-4c36-9127-c423b259643d" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1035.593897] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquired lock "refresh_cache-07b08283-cbe5-4c36-9127-c423b259643d" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1035.594076] env[62627]: DEBUG nova.network.neutron [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1035.624035] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba18e8e3-41cb-44f4-af99-4cffa2f8a9c6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.627904] env[62627]: DEBUG nova.network.neutron [-] [instance: e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1035.634252] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09d0bab8-0609-462f-8b05-3afb855ce6c4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.673410] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4899d80d-1738-49b6-a609-e22183afe427 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.674376] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64008baf-17ea-43d5-bfb6-5d59a9232a63 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1035.684635] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4740026-2873-457f-afd1-778066c7bda4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.691809] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2f06cd1-3aac-4431-9c30-ba6a1459571c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.712265] env[62627]: DEBUG nova.compute.provider_tree [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Updating inventory in ProviderTree for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1035.722916] env[62627]: DEBUG nova.compute.manager [req-0ab4db2b-1f73-4b84-be8d-baefb11fe92b req-d683e65a-541f-4774-8abb-5c66d17ea378 service nova] [instance: e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] Detach interface failed, port_id=fcb37279-335e-48f7-8d54-32ecee8fa761, reason: Instance e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea could not be found. {{(pid=62627) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1036.043574] env[62627]: DEBUG nova.compute.manager [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] [instance: 513c755a-424a-4e47-887e-7b315ad2e583] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1036.130112] env[62627]: INFO nova.compute.manager [-] [instance: e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] Took 1.37 seconds to deallocate network for instance. [ 1036.252315] env[62627]: ERROR nova.scheduler.client.report [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] [req-1d694752-234e-4b0e-856b-930bfcad1838] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c17e7f32-68d8-4e2f-8979-15d701de02f7. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-1d694752-234e-4b0e-856b-930bfcad1838"}]} [ 1036.271473] env[62627]: DEBUG nova.scheduler.client.report [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Refreshing inventories for resource provider c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1036.287270] env[62627]: DEBUG nova.scheduler.client.report [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Updating ProviderTree inventory for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1036.287503] env[62627]: DEBUG nova.compute.provider_tree [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Updating inventory in ProviderTree for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1036.304198] env[62627]: DEBUG nova.scheduler.client.report [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Refreshing aggregate associations for resource provider c17e7f32-68d8-4e2f-8979-15d701de02f7, aggregates: None {{(pid=62627) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1036.330888] env[62627]: DEBUG nova.scheduler.client.report [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Refreshing trait associations for resource provider c17e7f32-68d8-4e2f-8979-15d701de02f7, traits: COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62627) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1036.332758] env[62627]: DEBUG nova.network.neutron [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Updating instance_info_cache with network_info: [{"id": "21eb745b-bf3f-4539-b50e-df8c6ff07be1", "address": "fa:16:3e:d0:49:53", "network": {"id": "7c6d5a35-da58-4a44-83d0-20feb994e5ab", 
"bridge": "br-int", "label": "tempest-DeleteServersTestJSON-709440062-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c9263b20f714274a55b88a8bdbd5251", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "99be9a5e-b3f9-4e6c-83d5-df11f817847d", "external-id": "nsx-vlan-transportzone-566", "segmentation_id": 566, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap21eb745b-bf", "ovs_interfaceid": "21eb745b-bf3f-4539-b50e-df8c6ff07be1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1036.379981] env[62627]: DEBUG nova.compute.manager [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] [instance: 16ac82e3-b9e3-4c34-bb8c-2989cc33246f] Start spawning the instance on the hypervisor. {{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1036.403530] env[62627]: DEBUG nova.virt.hardware [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1036.403530] env[62627]: DEBUG nova.virt.hardware [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1036.403721] env[62627]: DEBUG nova.virt.hardware [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1036.403855] env[62627]: DEBUG nova.virt.hardware [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1036.404009] env[62627]: DEBUG nova.virt.hardware 
[None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1036.404194] env[62627]: DEBUG nova.virt.hardware [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1036.404399] env[62627]: DEBUG nova.virt.hardware [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1036.404557] env[62627]: DEBUG nova.virt.hardware [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1036.404722] env[62627]: DEBUG nova.virt.hardware [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1036.404880] env[62627]: DEBUG nova.virt.hardware [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1036.405072] env[62627]: DEBUG nova.virt.hardware [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1036.405970] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55ef14c7-2045-42d1-bd34-25be09438508 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.414591] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae2ab38b-f31f-4e1c-b231-5bf02aae6525 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.529381] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1711321-ea69-48c5-b431-951cba6f7f56 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.537911] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4101f639-d714-4b6d-8438-899bcf058616 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.570171] env[62627]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6373050e-dbe5-4693-af6d-7c89d8fc1ee0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.577709] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56fb9d16-5029-4bda-94d3-995879ad4782 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.582256] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1036.591804] env[62627]: DEBUG nova.compute.provider_tree [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Updating inventory in ProviderTree for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1036.635440] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0a3604f4-6f17-4d5c-bc87-997035436c17 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1036.835759] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Releasing lock "refresh_cache-07b08283-cbe5-4c36-9127-c423b259643d" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1036.965890] env[62627]: DEBUG nova.network.neutron [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] [instance: 16ac82e3-b9e3-4c34-bb8c-2989cc33246f] Successfully updated port: 3597f9e3-4248-4b24-b7ca-bce13900128a {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1037.129639] env[62627]: DEBUG nova.scheduler.client.report [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Updated inventory for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with generation 90 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1037.129937] env[62627]: DEBUG nova.compute.provider_tree [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Updating resource provider c17e7f32-68d8-4e2f-8979-15d701de02f7 generation from 90 to 91 during operation: update_inventory {{(pid=62627) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1037.130154] env[62627]: DEBUG nova.compute.provider_tree [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Updating inventory in ProviderTree for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1037.144576] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1037.145461] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0d250f2-a45a-4596-85f8-0b69aaefa899 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.154147] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1037.154383] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ce4cf316-4eac-43f3-9e33-06d1d0b77cf9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.179336] env[62627]: DEBUG nova.compute.manager [req-4b807606-3c6d-484a-bd07-2770469f0ea9 req-f7c8a241-0f44-45c4-a81c-9858f126c870 service nova] [instance: 16ac82e3-b9e3-4c34-bb8c-2989cc33246f] Received event network-vif-plugged-3597f9e3-4248-4b24-b7ca-bce13900128a {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1037.179582] env[62627]: DEBUG oslo_concurrency.lockutils [req-4b807606-3c6d-484a-bd07-2770469f0ea9 req-f7c8a241-0f44-45c4-a81c-9858f126c870 service nova] Acquiring lock "16ac82e3-b9e3-4c34-bb8c-2989cc33246f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1037.179823] env[62627]: DEBUG oslo_concurrency.lockutils [req-4b807606-3c6d-484a-bd07-2770469f0ea9 req-f7c8a241-0f44-45c4-a81c-9858f126c870 service nova] Lock "16ac82e3-b9e3-4c34-bb8c-2989cc33246f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1037.180039] env[62627]: DEBUG oslo_concurrency.lockutils [req-4b807606-3c6d-484a-bd07-2770469f0ea9 req-f7c8a241-0f44-45c4-a81c-9858f126c870 service nova] Lock "16ac82e3-b9e3-4c34-bb8c-2989cc33246f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1037.180240] env[62627]: DEBUG nova.compute.manager [req-4b807606-3c6d-484a-bd07-2770469f0ea9 req-f7c8a241-0f44-45c4-a81c-9858f126c870 service nova] [instance: 16ac82e3-b9e3-4c34-bb8c-2989cc33246f] No waiting events found dispatching network-vif-plugged-3597f9e3-4248-4b24-b7ca-bce13900128a {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1037.180435] env[62627]: WARNING nova.compute.manager [req-4b807606-3c6d-484a-bd07-2770469f0ea9 req-f7c8a241-0f44-45c4-a81c-9858f126c870 service nova] [instance: 16ac82e3-b9e3-4c34-bb8c-2989cc33246f] Received unexpected event network-vif-plugged-3597f9e3-4248-4b24-b7ca-bce13900128a for instance with vm_state building and task_state spawning. [ 1037.180589] env[62627]: DEBUG nova.compute.manager [req-4b807606-3c6d-484a-bd07-2770469f0ea9 req-f7c8a241-0f44-45c4-a81c-9858f126c870 service nova] [instance: 16ac82e3-b9e3-4c34-bb8c-2989cc33246f] Received event network-changed-3597f9e3-4248-4b24-b7ca-bce13900128a {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1037.180771] env[62627]: DEBUG nova.compute.manager [req-4b807606-3c6d-484a-bd07-2770469f0ea9 req-f7c8a241-0f44-45c4-a81c-9858f126c870 service nova] [instance: 16ac82e3-b9e3-4c34-bb8c-2989cc33246f] Refreshing instance network info cache due to event network-changed-3597f9e3-4248-4b24-b7ca-bce13900128a. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1037.181019] env[62627]: DEBUG oslo_concurrency.lockutils [req-4b807606-3c6d-484a-bd07-2770469f0ea9 req-f7c8a241-0f44-45c4-a81c-9858f126c870 service nova] Acquiring lock "refresh_cache-16ac82e3-b9e3-4c34-bb8c-2989cc33246f" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1037.181204] env[62627]: DEBUG oslo_concurrency.lockutils [req-4b807606-3c6d-484a-bd07-2770469f0ea9 req-f7c8a241-0f44-45c4-a81c-9858f126c870 service nova] Acquired lock "refresh_cache-16ac82e3-b9e3-4c34-bb8c-2989cc33246f" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1037.181382] env[62627]: DEBUG nova.network.neutron [req-4b807606-3c6d-484a-bd07-2770469f0ea9 req-f7c8a241-0f44-45c4-a81c-9858f126c870 service nova] [instance: 16ac82e3-b9e3-4c34-bb8c-2989cc33246f] Refreshing network info cache for port 3597f9e3-4248-4b24-b7ca-bce13900128a {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1037.210644] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1037.210871] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1037.210932] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Deleting the datastore file [datastore2] 07b08283-cbe5-4c36-9127-c423b259643d {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1037.211308] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b00b41fd-e5ee-4521-bf74-5b3583f561e1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.218173] env[62627]: DEBUG oslo_vmware.api [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 1037.218173] env[62627]: value = "task-2194792" [ 1037.218173] env[62627]: _type = "Task" [ 1037.218173] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.225640] env[62627]: DEBUG oslo_vmware.api [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194792, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.467122] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Acquiring lock "refresh_cache-16ac82e3-b9e3-4c34-bb8c-2989cc33246f" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1037.635613] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.281s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1037.636171] env[62627]: DEBUG nova.compute.manager [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] [instance: f8003eee-248c-4005-8465-6e3e96f98f88] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1037.638710] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7fcb77fa-f30d-4201-a41e-5a196fddd83d tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.258s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1037.638927] env[62627]: DEBUG nova.objects.instance [None req-7fcb77fa-f30d-4201-a41e-5a196fddd83d tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Lazy-loading 'resources' on Instance uuid 58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1037.715620] env[62627]: DEBUG nova.network.neutron [req-4b807606-3c6d-484a-bd07-2770469f0ea9 req-f7c8a241-0f44-45c4-a81c-9858f126c870 service nova] [instance: 16ac82e3-b9e3-4c34-bb8c-2989cc33246f] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1037.727277] env[62627]: DEBUG oslo_vmware.api [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194792, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.131918} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.727507] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1037.727772] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1037.727993] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1037.749053] env[62627]: INFO nova.scheduler.client.report [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Deleted allocations for instance 07b08283-cbe5-4c36-9127-c423b259643d [ 1037.767323] env[62627]: DEBUG nova.network.neutron [req-4b807606-3c6d-484a-bd07-2770469f0ea9 req-f7c8a241-0f44-45c4-a81c-9858f126c870 service nova] [instance: 16ac82e3-b9e3-4c34-bb8c-2989cc33246f] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1038.141927] env[62627]: DEBUG nova.compute.utils [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1038.143525] env[62627]: DEBUG nova.compute.manager [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] [instance: f8003eee-248c-4005-8465-6e3e96f98f88] Allocating IP information in the background. 
{{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1038.143525] env[62627]: DEBUG nova.network.neutron [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] [instance: f8003eee-248c-4005-8465-6e3e96f98f88] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1038.194892] env[62627]: DEBUG nova.policy [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '842d4c0e120d449f8f664765c549e95f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '512907c88fac4d0eb5aa460036ced32b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 1038.254545] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1038.269533] env[62627]: DEBUG oslo_concurrency.lockutils [req-4b807606-3c6d-484a-bd07-2770469f0ea9 req-f7c8a241-0f44-45c4-a81c-9858f126c870 service nova] Releasing lock "refresh_cache-16ac82e3-b9e3-4c34-bb8c-2989cc33246f" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1038.269785] env[62627]: DEBUG nova.compute.manager [req-4b807606-3c6d-484a-bd07-2770469f0ea9 req-f7c8a241-0f44-45c4-a81c-9858f126c870 service nova] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Received event network-vif-unplugged-21eb745b-bf3f-4539-b50e-df8c6ff07be1 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1038.270052] env[62627]: DEBUG oslo_concurrency.lockutils [req-4b807606-3c6d-484a-bd07-2770469f0ea9 req-f7c8a241-0f44-45c4-a81c-9858f126c870 service nova] Acquiring lock "07b08283-cbe5-4c36-9127-c423b259643d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1038.270266] env[62627]: DEBUG oslo_concurrency.lockutils [req-4b807606-3c6d-484a-bd07-2770469f0ea9 req-f7c8a241-0f44-45c4-a81c-9858f126c870 service nova] Lock "07b08283-cbe5-4c36-9127-c423b259643d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1038.270429] env[62627]: DEBUG oslo_concurrency.lockutils [req-4b807606-3c6d-484a-bd07-2770469f0ea9 req-f7c8a241-0f44-45c4-a81c-9858f126c870 service nova] Lock "07b08283-cbe5-4c36-9127-c423b259643d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1038.270590] env[62627]: DEBUG 
nova.compute.manager [req-4b807606-3c6d-484a-bd07-2770469f0ea9 req-f7c8a241-0f44-45c4-a81c-9858f126c870 service nova] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] No waiting events found dispatching network-vif-unplugged-21eb745b-bf3f-4539-b50e-df8c6ff07be1 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1038.271233] env[62627]: WARNING nova.compute.manager [req-4b807606-3c6d-484a-bd07-2770469f0ea9 req-f7c8a241-0f44-45c4-a81c-9858f126c870 service nova] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Received unexpected event network-vif-unplugged-21eb745b-bf3f-4539-b50e-df8c6ff07be1 for instance with vm_state shelved and task_state shelving_offloading. [ 1038.271233] env[62627]: DEBUG nova.compute.manager [req-4b807606-3c6d-484a-bd07-2770469f0ea9 req-f7c8a241-0f44-45c4-a81c-9858f126c870 service nova] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Received event network-changed-21eb745b-bf3f-4539-b50e-df8c6ff07be1 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1038.271233] env[62627]: DEBUG nova.compute.manager [req-4b807606-3c6d-484a-bd07-2770469f0ea9 req-f7c8a241-0f44-45c4-a81c-9858f126c870 service nova] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Refreshing instance network info cache due to event network-changed-21eb745b-bf3f-4539-b50e-df8c6ff07be1. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1038.271453] env[62627]: DEBUG oslo_concurrency.lockutils [req-4b807606-3c6d-484a-bd07-2770469f0ea9 req-f7c8a241-0f44-45c4-a81c-9858f126c870 service nova] Acquiring lock "refresh_cache-07b08283-cbe5-4c36-9127-c423b259643d" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1038.271632] env[62627]: DEBUG oslo_concurrency.lockutils [req-4b807606-3c6d-484a-bd07-2770469f0ea9 req-f7c8a241-0f44-45c4-a81c-9858f126c870 service nova] Acquired lock "refresh_cache-07b08283-cbe5-4c36-9127-c423b259643d" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1038.271824] env[62627]: DEBUG nova.network.neutron [req-4b807606-3c6d-484a-bd07-2770469f0ea9 req-f7c8a241-0f44-45c4-a81c-9858f126c870 service nova] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Refreshing network info cache for port 21eb745b-bf3f-4539-b50e-df8c6ff07be1 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1038.272939] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Acquired lock "refresh_cache-16ac82e3-b9e3-4c34-bb8c-2989cc33246f" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1038.273145] env[62627]: DEBUG nova.network.neutron [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] [instance: 16ac82e3-b9e3-4c34-bb8c-2989cc33246f] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1038.350971] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-051b9c0b-78ed-4fc9-b5a4-c325516a8979 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.360136] env[62627]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dbb06b3-9d61-41c6-a06a-025abc7d8162 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.389011] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e84481af-04db-430e-9c27-e1f97638c843 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.396446] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3e4cd31-0c89-4510-87d9-e1a3a206396c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.409690] env[62627]: DEBUG nova.compute.provider_tree [None req-7fcb77fa-f30d-4201-a41e-5a196fddd83d tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1038.520748] env[62627]: DEBUG nova.network.neutron [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] [instance: f8003eee-248c-4005-8465-6e3e96f98f88] Successfully created port: 66cad317-3512-4850-8957-332a2fffb1c0 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1038.601048] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8dd7e758-c587-4b7f-bd35-0248787568bd tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquiring lock "07b08283-cbe5-4c36-9127-c423b259643d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1038.650436] env[62627]: DEBUG nova.compute.manager [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] [instance: f8003eee-248c-4005-8465-6e3e96f98f88] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1038.807608] env[62627]: DEBUG nova.network.neutron [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] [instance: 16ac82e3-b9e3-4c34-bb8c-2989cc33246f] Instance cache missing network info. 
{{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1038.912534] env[62627]: DEBUG nova.scheduler.client.report [None req-7fcb77fa-f30d-4201-a41e-5a196fddd83d tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1039.006455] env[62627]: DEBUG nova.network.neutron [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] [instance: 16ac82e3-b9e3-4c34-bb8c-2989cc33246f] Updating instance_info_cache with network_info: [{"id": "3597f9e3-4248-4b24-b7ca-bce13900128a", "address": "fa:16:3e:6c:e8:53", "network": {"id": "bd876f46-2bdf-4196-b718-c36f7e15f150", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ccb0cafa1a904d578ec4998b386bebf7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae1d6a8-cbba-4bbf-af10-ba5467340475", "external-id": "nsx-vlan-transportzone-271", "segmentation_id": 271, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3597f9e3-42", "ovs_interfaceid": "3597f9e3-4248-4b24-b7ca-bce13900128a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1039.133612] env[62627]: DEBUG nova.network.neutron [req-4b807606-3c6d-484a-bd07-2770469f0ea9 req-f7c8a241-0f44-45c4-a81c-9858f126c870 service nova] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Updated VIF entry in instance network info cache for port 21eb745b-bf3f-4539-b50e-df8c6ff07be1. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1039.133950] env[62627]: DEBUG nova.network.neutron [req-4b807606-3c6d-484a-bd07-2770469f0ea9 req-f7c8a241-0f44-45c4-a81c-9858f126c870 service nova] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Updating instance_info_cache with network_info: [{"id": "21eb745b-bf3f-4539-b50e-df8c6ff07be1", "address": "fa:16:3e:d0:49:53", "network": {"id": "7c6d5a35-da58-4a44-83d0-20feb994e5ab", "bridge": null, "label": "tempest-DeleteServersTestJSON-709440062-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c9263b20f714274a55b88a8bdbd5251", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap21eb745b-bf", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1039.417135] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7fcb77fa-f30d-4201-a41e-5a196fddd83d tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.778s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1039.419785] env[62627]: DEBUG oslo_concurrency.lockutils [None req-80003f26-d1e9-4774-bc0c-64bc7f5e2a38 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.186s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1039.420037] env[62627]: DEBUG nova.objects.instance [None req-80003f26-d1e9-4774-bc0c-64bc7f5e2a38 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Lazy-loading 'resources' on Instance uuid bd583f6b-be8d-4f02-a67e-0ea10e6dd087 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1039.441775] env[62627]: INFO nova.scheduler.client.report [None req-7fcb77fa-f30d-4201-a41e-5a196fddd83d tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Deleted allocations for instance 58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37 [ 1039.509013] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Releasing lock "refresh_cache-16ac82e3-b9e3-4c34-bb8c-2989cc33246f" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1039.509355] env[62627]: DEBUG nova.compute.manager [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] [instance: 16ac82e3-b9e3-4c34-bb8c-2989cc33246f] Instance network_info: |[{"id": 
"3597f9e3-4248-4b24-b7ca-bce13900128a", "address": "fa:16:3e:6c:e8:53", "network": {"id": "bd876f46-2bdf-4196-b718-c36f7e15f150", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ccb0cafa1a904d578ec4998b386bebf7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae1d6a8-cbba-4bbf-af10-ba5467340475", "external-id": "nsx-vlan-transportzone-271", "segmentation_id": 271, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3597f9e3-42", "ovs_interfaceid": "3597f9e3-4248-4b24-b7ca-bce13900128a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1039.509765] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] [instance: 16ac82e3-b9e3-4c34-bb8c-2989cc33246f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6c:e8:53', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cae1d6a8-cbba-4bbf-af10-ba5467340475', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3597f9e3-4248-4b24-b7ca-bce13900128a', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1039.517037] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Creating folder: Project (83faa1f468534985b7de71482956559b). Parent ref: group-v447541. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1039.517301] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8d3c7e5d-b670-4460-b30d-c92dea585bfb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.528113] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Created folder: Project (83faa1f468534985b7de71482956559b) in parent group-v447541. [ 1039.528301] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Creating folder: Instances. Parent ref: group-v447737. 
{{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1039.528524] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5b9911d5-4606-4888-a03e-df5e488ca59d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.539025] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Created folder: Instances in parent group-v447737. [ 1039.539025] env[62627]: DEBUG oslo.service.loopingcall [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1039.539025] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 16ac82e3-b9e3-4c34-bb8c-2989cc33246f] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1039.539025] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4ba46c86-4ae1-4f1c-8ab4-d22111e3ad56 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.557358] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1039.557358] env[62627]: value = "task-2194795" [ 1039.557358] env[62627]: _type = "Task" [ 1039.557358] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.564728] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194795, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.636660] env[62627]: DEBUG oslo_concurrency.lockutils [req-4b807606-3c6d-484a-bd07-2770469f0ea9 req-f7c8a241-0f44-45c4-a81c-9858f126c870 service nova] Releasing lock "refresh_cache-07b08283-cbe5-4c36-9127-c423b259643d" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1039.661023] env[62627]: DEBUG nova.compute.manager [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] [instance: f8003eee-248c-4005-8465-6e3e96f98f88] Start spawning the instance on the hypervisor. 
{{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1039.683891] env[62627]: DEBUG nova.virt.hardware [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1039.684149] env[62627]: DEBUG nova.virt.hardware [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1039.684307] env[62627]: DEBUG nova.virt.hardware [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1039.684488] env[62627]: DEBUG nova.virt.hardware [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1039.684740] env[62627]: DEBUG nova.virt.hardware [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1039.684915] env[62627]: DEBUG nova.virt.hardware [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1039.685147] env[62627]: DEBUG nova.virt.hardware [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1039.685859] env[62627]: DEBUG nova.virt.hardware [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1039.685859] env[62627]: DEBUG 
nova.virt.hardware [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1039.685859] env[62627]: DEBUG nova.virt.hardware [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1039.685859] env[62627]: DEBUG nova.virt.hardware [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1039.686684] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b591810-b3cb-4291-8dc9-972df914a54f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.694358] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c836b95-48e5-4b91-8406-2e3806fb8a53 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.949829] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7fcb77fa-f30d-4201-a41e-5a196fddd83d tempest-ServerMetadataTestJSON-2065088342 tempest-ServerMetadataTestJSON-2065088342-project-member] Lock "58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.107s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1040.077187] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194795, 'name': CreateVM_Task, 'duration_secs': 0.297941} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.077727] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 16ac82e3-b9e3-4c34-bb8c-2989cc33246f] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1040.078913] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1040.079222] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1040.079633] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1040.080058] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5e178e3-8138-46e9-9f9c-1291a872dbbe {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.087150] env[62627]: DEBUG oslo_vmware.api [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Waiting for the task: (returnval){ [ 1040.087150] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]520cecb5-801b-0d59-ae2f-49a74141516d" [ 1040.087150] env[62627]: _type = "Task" [ 1040.087150] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.097634] env[62627]: DEBUG oslo_vmware.api [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]520cecb5-801b-0d59-ae2f-49a74141516d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.142947] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c71368f9-51a6-4630-8958-69ade9438d63 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.150824] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbd55f48-5a44-452b-b360-1cb662d4b6d8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.182387] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88de4e69-cb26-4ae1-9ae5-53e31349b2b9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.192505] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2f38cbe-082b-4165-b3d2-5be5da912b7d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.205231] env[62627]: DEBUG nova.compute.provider_tree [None req-80003f26-d1e9-4774-bc0c-64bc7f5e2a38 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1040.268705] env[62627]: DEBUG nova.compute.manager [req-60f93e2e-e93b-4f4c-af11-15ff67259692 req-c52a88e4-95ce-47da-be10-5454d8f39d18 service nova] [instance: f8003eee-248c-4005-8465-6e3e96f98f88] Received event network-vif-plugged-66cad317-3512-4850-8957-332a2fffb1c0 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1040.268961] env[62627]: DEBUG oslo_concurrency.lockutils [req-60f93e2e-e93b-4f4c-af11-15ff67259692 req-c52a88e4-95ce-47da-be10-5454d8f39d18 service nova] Acquiring lock "f8003eee-248c-4005-8465-6e3e96f98f88-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1040.269186] env[62627]: DEBUG oslo_concurrency.lockutils [req-60f93e2e-e93b-4f4c-af11-15ff67259692 req-c52a88e4-95ce-47da-be10-5454d8f39d18 service nova] Lock "f8003eee-248c-4005-8465-6e3e96f98f88-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1040.269350] env[62627]: DEBUG oslo_concurrency.lockutils [req-60f93e2e-e93b-4f4c-af11-15ff67259692 req-c52a88e4-95ce-47da-be10-5454d8f39d18 service nova] Lock "f8003eee-248c-4005-8465-6e3e96f98f88-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1040.269582] env[62627]: DEBUG nova.compute.manager [req-60f93e2e-e93b-4f4c-af11-15ff67259692 req-c52a88e4-95ce-47da-be10-5454d8f39d18 service nova] [instance: f8003eee-248c-4005-8465-6e3e96f98f88] No waiting events found dispatching network-vif-plugged-66cad317-3512-4850-8957-332a2fffb1c0 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 
1040.269691] env[62627]: WARNING nova.compute.manager [req-60f93e2e-e93b-4f4c-af11-15ff67259692 req-c52a88e4-95ce-47da-be10-5454d8f39d18 service nova] [instance: f8003eee-248c-4005-8465-6e3e96f98f88] Received unexpected event network-vif-plugged-66cad317-3512-4850-8957-332a2fffb1c0 for instance with vm_state building and task_state spawning. [ 1040.386713] env[62627]: DEBUG nova.network.neutron [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] [instance: f8003eee-248c-4005-8465-6e3e96f98f88] Successfully updated port: 66cad317-3512-4850-8957-332a2fffb1c0 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1040.597140] env[62627]: DEBUG oslo_vmware.api [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]520cecb5-801b-0d59-ae2f-49a74141516d, 'name': SearchDatastore_Task, 'duration_secs': 0.010839} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.597368] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1040.597599] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] [instance: 16ac82e3-b9e3-4c34-bb8c-2989cc33246f] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1040.597829] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1040.597974] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1040.598164] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1040.598426] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2d63971b-ebfd-4b2c-a6a0-0ceada80a5a6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.606488] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1040.606689] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1040.607409] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cb52dde2-6751-4c63-a808-8699adb980cf {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.613199] env[62627]: DEBUG oslo_vmware.api [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Waiting for the task: (returnval){ [ 1040.613199] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52063941-bb9a-faa4-7158-701fb464358c" [ 1040.613199] env[62627]: _type = "Task" [ 1040.613199] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.620055] env[62627]: DEBUG oslo_vmware.api [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52063941-bb9a-faa4-7158-701fb464358c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.709524] env[62627]: DEBUG nova.scheduler.client.report [None req-80003f26-d1e9-4774-bc0c-64bc7f5e2a38 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1040.889774] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Acquiring lock "refresh_cache-f8003eee-248c-4005-8465-6e3e96f98f88" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1040.889934] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Acquired lock "refresh_cache-f8003eee-248c-4005-8465-6e3e96f98f88" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1040.890104] env[62627]: DEBUG nova.network.neutron [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 
tempest-InstanceActionsTestJSON-675212662-project-member] [instance: f8003eee-248c-4005-8465-6e3e96f98f88] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1041.122621] env[62627]: DEBUG oslo_vmware.api [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52063941-bb9a-faa4-7158-701fb464358c, 'name': SearchDatastore_Task, 'duration_secs': 0.00891} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.123447] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b6fb2dc3-63cc-4847-81b3-7a97c3419f05 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.129093] env[62627]: DEBUG oslo_vmware.api [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Waiting for the task: (returnval){ [ 1041.129093] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52cd1572-9a87-568b-13e8-ce4dd0f4f2b0" [ 1041.129093] env[62627]: _type = "Task" [ 1041.129093] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.136554] env[62627]: DEBUG oslo_vmware.api [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52cd1572-9a87-568b-13e8-ce4dd0f4f2b0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.215100] env[62627]: DEBUG oslo_concurrency.lockutils [None req-80003f26-d1e9-4774-bc0c-64bc7f5e2a38 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.795s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.218298] env[62627]: DEBUG oslo_concurrency.lockutils [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.536s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1041.219631] env[62627]: INFO nova.compute.claims [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e263159d-e9ff-4a19-a29a-cb06eaf2d679] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1041.240705] env[62627]: INFO nova.scheduler.client.report [None req-80003f26-d1e9-4774-bc0c-64bc7f5e2a38 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Deleted allocations for instance bd583f6b-be8d-4f02-a67e-0ea10e6dd087 [ 1041.428522] env[62627]: DEBUG nova.network.neutron [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] [instance: f8003eee-248c-4005-8465-6e3e96f98f88] Instance cache missing network info. 
{{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1041.565626] env[62627]: DEBUG nova.network.neutron [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] [instance: f8003eee-248c-4005-8465-6e3e96f98f88] Updating instance_info_cache with network_info: [{"id": "66cad317-3512-4850-8957-332a2fffb1c0", "address": "fa:16:3e:e3:dc:02", "network": {"id": "073dffbd-13a9-491a-9a85-f6b0bd5152f5", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1442356389-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "512907c88fac4d0eb5aa460036ced32b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66cad317-35", "ovs_interfaceid": "66cad317-3512-4850-8957-332a2fffb1c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1041.641724] env[62627]: DEBUG oslo_vmware.api [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52cd1572-9a87-568b-13e8-ce4dd0f4f2b0, 'name': SearchDatastore_Task, 'duration_secs': 0.009553} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.642011] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1041.642116] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 16ac82e3-b9e3-4c34-bb8c-2989cc33246f/16ac82e3-b9e3-4c34-bb8c-2989cc33246f.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1041.642773] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-51decfe0-bb25-4885-abc9-cb3323d60683 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.648607] env[62627]: DEBUG oslo_vmware.api [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Waiting for the task: (returnval){ [ 1041.648607] env[62627]: value = "task-2194796" [ 1041.648607] env[62627]: _type = "Task" [ 1041.648607] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.656352] env[62627]: DEBUG oslo_vmware.api [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Task: {'id': task-2194796, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.750396] env[62627]: DEBUG oslo_concurrency.lockutils [None req-80003f26-d1e9-4774-bc0c-64bc7f5e2a38 tempest-ImagesOneServerNegativeTestJSON-1314187372 tempest-ImagesOneServerNegativeTestJSON-1314187372-project-member] Lock "bd583f6b-be8d-4f02-a67e-0ea10e6dd087" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.930s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1042.068667] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Releasing lock "refresh_cache-f8003eee-248c-4005-8465-6e3e96f98f88" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1042.069070] env[62627]: DEBUG nova.compute.manager [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] [instance: f8003eee-248c-4005-8465-6e3e96f98f88] Instance network_info: |[{"id": "66cad317-3512-4850-8957-332a2fffb1c0", "address": "fa:16:3e:e3:dc:02", "network": {"id": "073dffbd-13a9-491a-9a85-f6b0bd5152f5", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1442356389-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "512907c88fac4d0eb5aa460036ced32b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66cad317-35", "ovs_interfaceid": "66cad317-3512-4850-8957-332a2fffb1c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1042.069554] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] [instance: f8003eee-248c-4005-8465-6e3e96f98f88] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e3:dc:02', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '37434b93-dfdc-4a3f-bf5a-9f2cbe25a754', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '66cad317-3512-4850-8957-332a2fffb1c0', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1042.077762] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Creating folder: Project (512907c88fac4d0eb5aa460036ced32b). Parent ref: group-v447541. 
{{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1042.078288] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dd27fcd7-1622-4cac-b6eb-b519c0a5fecc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.091278] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Created folder: Project (512907c88fac4d0eb5aa460036ced32b) in parent group-v447541. [ 1042.091633] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Creating folder: Instances. Parent ref: group-v447740. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1042.091968] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d42b5149-e868-4ddf-802f-9e1f4189adaf {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.102234] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Created folder: Instances in parent group-v447740. [ 1042.102515] env[62627]: DEBUG oslo.service.loopingcall [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1042.102731] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f8003eee-248c-4005-8465-6e3e96f98f88] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1042.102954] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a1b7ef27-be93-4a31-8a8f-df5226e22a0c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.121553] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1042.121553] env[62627]: value = "task-2194799" [ 1042.121553] env[62627]: _type = "Task" [ 1042.121553] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.129027] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194799, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.159017] env[62627]: DEBUG oslo_vmware.api [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Task: {'id': task-2194796, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.465013} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.159277] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 16ac82e3-b9e3-4c34-bb8c-2989cc33246f/16ac82e3-b9e3-4c34-bb8c-2989cc33246f.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1042.159488] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] [instance: 16ac82e3-b9e3-4c34-bb8c-2989cc33246f] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1042.159728] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2d663434-98f7-4c4a-ba62-c2fc653c91b8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.166733] env[62627]: DEBUG oslo_vmware.api [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Waiting for the task: (returnval){ [ 1042.166733] env[62627]: value = "task-2194800" [ 1042.166733] env[62627]: _type = "Task" [ 1042.166733] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.174792] env[62627]: DEBUG oslo_vmware.api [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Task: {'id': task-2194800, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.299959] env[62627]: DEBUG nova.compute.manager [req-22ed7e4d-34d5-4cf5-99b5-830990eedd26 req-9833de12-0824-4ec8-996f-98e422f536d1 service nova] [instance: f8003eee-248c-4005-8465-6e3e96f98f88] Received event network-changed-66cad317-3512-4850-8957-332a2fffb1c0 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1042.300795] env[62627]: DEBUG nova.compute.manager [req-22ed7e4d-34d5-4cf5-99b5-830990eedd26 req-9833de12-0824-4ec8-996f-98e422f536d1 service nova] [instance: f8003eee-248c-4005-8465-6e3e96f98f88] Refreshing instance network info cache due to event network-changed-66cad317-3512-4850-8957-332a2fffb1c0. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1042.300795] env[62627]: DEBUG oslo_concurrency.lockutils [req-22ed7e4d-34d5-4cf5-99b5-830990eedd26 req-9833de12-0824-4ec8-996f-98e422f536d1 service nova] Acquiring lock "refresh_cache-f8003eee-248c-4005-8465-6e3e96f98f88" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1042.300795] env[62627]: DEBUG oslo_concurrency.lockutils [req-22ed7e4d-34d5-4cf5-99b5-830990eedd26 req-9833de12-0824-4ec8-996f-98e422f536d1 service nova] Acquired lock "refresh_cache-f8003eee-248c-4005-8465-6e3e96f98f88" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1042.300795] env[62627]: DEBUG nova.network.neutron [req-22ed7e4d-34d5-4cf5-99b5-830990eedd26 req-9833de12-0824-4ec8-996f-98e422f536d1 service nova] [instance: f8003eee-248c-4005-8465-6e3e96f98f88] Refreshing network info cache for port 66cad317-3512-4850-8957-332a2fffb1c0 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1042.433676] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ebd0150-24ac-41ad-96bf-74977ce4e939 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.441481] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6871467c-5909-4ee5-8cbc-0f63c146c41f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.472665] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f77c9712-70e1-4972-be3d-61591ed68297 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.479990] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92834d55-bc69-4f21-8f30-11c553b63e12 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.493282] env[62627]: DEBUG nova.compute.provider_tree [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1042.638777] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194799, 'name': CreateVM_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.678485] env[62627]: DEBUG oslo_vmware.api [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Task: {'id': task-2194800, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070656} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.678788] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] [instance: 16ac82e3-b9e3-4c34-bb8c-2989cc33246f] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1042.686576] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5952a920-4eda-442d-bae7-100d1d0ca38c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.710101] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] [instance: 16ac82e3-b9e3-4c34-bb8c-2989cc33246f] Reconfiguring VM instance instance-00000048 to attach disk [datastore2] 16ac82e3-b9e3-4c34-bb8c-2989cc33246f/16ac82e3-b9e3-4c34-bb8c-2989cc33246f.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1042.710408] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-da317461-10a1-4b7c-a1ed-84a95c542057 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.730512] env[62627]: DEBUG oslo_vmware.api [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Waiting for the task: (returnval){ [ 1042.730512] env[62627]: value = "task-2194801" [ 1042.730512] env[62627]: _type = "Task" [ 1042.730512] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.738454] env[62627]: DEBUG oslo_vmware.api [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Task: {'id': task-2194801, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.996288] env[62627]: DEBUG nova.scheduler.client.report [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1043.032687] env[62627]: DEBUG nova.network.neutron [req-22ed7e4d-34d5-4cf5-99b5-830990eedd26 req-9833de12-0824-4ec8-996f-98e422f536d1 service nova] [instance: f8003eee-248c-4005-8465-6e3e96f98f88] Updated VIF entry in instance network info cache for port 66cad317-3512-4850-8957-332a2fffb1c0. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1043.033054] env[62627]: DEBUG nova.network.neutron [req-22ed7e4d-34d5-4cf5-99b5-830990eedd26 req-9833de12-0824-4ec8-996f-98e422f536d1 service nova] [instance: f8003eee-248c-4005-8465-6e3e96f98f88] Updating instance_info_cache with network_info: [{"id": "66cad317-3512-4850-8957-332a2fffb1c0", "address": "fa:16:3e:e3:dc:02", "network": {"id": "073dffbd-13a9-491a-9a85-f6b0bd5152f5", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1442356389-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "512907c88fac4d0eb5aa460036ced32b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66cad317-35", "ovs_interfaceid": "66cad317-3512-4850-8957-332a2fffb1c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1043.134528] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194799, 'name': CreateVM_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.240730] env[62627]: DEBUG oslo_vmware.api [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Task: {'id': task-2194801, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.501968] env[62627]: DEBUG oslo_concurrency.lockutils [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.284s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1043.502505] env[62627]: DEBUG nova.compute.manager [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e263159d-e9ff-4a19-a29a-cb06eaf2d679] Start building networks asynchronously for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1043.506876] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.924s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1043.508803] env[62627]: INFO nova.compute.claims [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] [instance: 513c755a-424a-4e47-887e-7b315ad2e583] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1043.535422] env[62627]: DEBUG oslo_concurrency.lockutils [req-22ed7e4d-34d5-4cf5-99b5-830990eedd26 req-9833de12-0824-4ec8-996f-98e422f536d1 service nova] Releasing lock "refresh_cache-f8003eee-248c-4005-8465-6e3e96f98f88" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1043.641329] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194799, 'name': CreateVM_Task, 'duration_secs': 1.359373} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.641329] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f8003eee-248c-4005-8465-6e3e96f98f88] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1043.641329] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1043.641329] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1043.641329] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1043.641329] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b4dcb1fd-2225-4d6d-8f85-a38754b4031e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.645438] env[62627]: DEBUG oslo_vmware.api [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Waiting for the task: (returnval){ [ 1043.645438] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]527ec9fc-7747-521a-fc15-1ff4dd1650f7" [ 1043.645438] env[62627]: _type = "Task" [ 1043.645438] env[62627]: } 
to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.655875] env[62627]: DEBUG oslo_vmware.api [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]527ec9fc-7747-521a-fc15-1ff4dd1650f7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.744453] env[62627]: DEBUG oslo_vmware.api [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Task: {'id': task-2194801, 'name': ReconfigVM_Task, 'duration_secs': 0.946546} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.744716] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] [instance: 16ac82e3-b9e3-4c34-bb8c-2989cc33246f] Reconfigured VM instance instance-00000048 to attach disk [datastore2] 16ac82e3-b9e3-4c34-bb8c-2989cc33246f/16ac82e3-b9e3-4c34-bb8c-2989cc33246f.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1043.745380] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-35cfa61b-8599-4cc3-b0e2-4fa5b7765a76 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.752416] env[62627]: DEBUG oslo_vmware.api [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Waiting for the task: (returnval){ [ 1043.752416] env[62627]: value = "task-2194802" [ 1043.752416] env[62627]: _type = "Task" [ 1043.752416] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.760440] env[62627]: DEBUG oslo_vmware.api [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Task: {'id': task-2194802, 'name': Rename_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.013873] env[62627]: DEBUG nova.compute.utils [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1044.017338] env[62627]: DEBUG nova.compute.manager [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e263159d-e9ff-4a19-a29a-cb06eaf2d679] Allocating IP information in the background. 
{{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1044.017507] env[62627]: DEBUG nova.network.neutron [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e263159d-e9ff-4a19-a29a-cb06eaf2d679] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1044.062109] env[62627]: DEBUG nova.policy [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8d6a2c615f034c49836031736531061c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '01b0f3fe63f64e1183acc0fec83bb3f5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 1044.158659] env[62627]: DEBUG oslo_vmware.api [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]527ec9fc-7747-521a-fc15-1ff4dd1650f7, 'name': SearchDatastore_Task, 'duration_secs': 0.009568} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.158659] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1044.158901] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] [instance: f8003eee-248c-4005-8465-6e3e96f98f88] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1044.159077] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1044.159304] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1044.160051] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Creating directory with path 
[datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1044.160051] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5ea85d0c-db9e-4fa7-be9d-6ca7e13f23b7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.168486] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1044.168693] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1044.169467] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef9a5eac-55b5-4079-886d-74a804f17fbf {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.174484] env[62627]: DEBUG oslo_vmware.api [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Waiting for the task: (returnval){ [ 1044.174484] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52b1fedc-9031-c5ea-eb57-85e1b13f0945" [ 1044.174484] env[62627]: _type = "Task" [ 1044.174484] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.182894] env[62627]: DEBUG oslo_vmware.api [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52b1fedc-9031-c5ea-eb57-85e1b13f0945, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.262740] env[62627]: DEBUG oslo_vmware.api [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Task: {'id': task-2194802, 'name': Rename_Task, 'duration_secs': 0.134485} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.263017] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] [instance: 16ac82e3-b9e3-4c34-bb8c-2989cc33246f] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1044.263514] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1f7cf9cf-9541-4fe3-9cbb-f82f0519991b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.270811] env[62627]: DEBUG oslo_vmware.api [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Waiting for the task: (returnval){ [ 1044.270811] env[62627]: value = "task-2194803" [ 1044.270811] env[62627]: _type = "Task" [ 1044.270811] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.279772] env[62627]: DEBUG oslo_vmware.api [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Task: {'id': task-2194803, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.340853] env[62627]: DEBUG nova.network.neutron [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e263159d-e9ff-4a19-a29a-cb06eaf2d679] Successfully created port: 08a5a113-46c4-4d22-b644-b503cfb32646 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1044.518702] env[62627]: DEBUG nova.compute.manager [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e263159d-e9ff-4a19-a29a-cb06eaf2d679] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1044.689219] env[62627]: DEBUG oslo_vmware.api [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52b1fedc-9031-c5ea-eb57-85e1b13f0945, 'name': SearchDatastore_Task, 'duration_secs': 0.010865} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.691086] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-92fce7c0-19b6-49f8-85b0-7d866c82b57c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.702198] env[62627]: DEBUG oslo_vmware.api [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Waiting for the task: (returnval){ [ 1044.702198] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52664db1-8a71-c8d0-480e-065097d9c360" [ 1044.702198] env[62627]: _type = "Task" [ 1044.702198] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.712458] env[62627]: DEBUG oslo_vmware.api [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52664db1-8a71-c8d0-480e-065097d9c360, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.780107] env[62627]: DEBUG oslo_vmware.api [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Task: {'id': task-2194803, 'name': PowerOnVM_Task, 'duration_secs': 0.443764} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.781169] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] [instance: 16ac82e3-b9e3-4c34-bb8c-2989cc33246f] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1044.781381] env[62627]: INFO nova.compute.manager [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] [instance: 16ac82e3-b9e3-4c34-bb8c-2989cc33246f] Took 8.40 seconds to spawn the instance on the hypervisor. [ 1044.781559] env[62627]: DEBUG nova.compute.manager [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] [instance: 16ac82e3-b9e3-4c34-bb8c-2989cc33246f] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1044.782300] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e89e65a-16a8-435f-bdc6-2238b260bee0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.785152] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b07c3f39-705b-46b4-a12e-4ca7284c968c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.792767] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0532dc4b-84d0-43a9-8b8b-42c6aa1b2c0a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.826425] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6828a8c7-0956-4eb4-8241-709acb34876a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.834538] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edd8a26c-2722-4d77-a48e-042f571a5ef5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.848485] env[62627]: DEBUG nova.compute.provider_tree [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Inventory has not changed in ProviderTree for 
provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1045.214285] env[62627]: DEBUG oslo_vmware.api [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52664db1-8a71-c8d0-480e-065097d9c360, 'name': SearchDatastore_Task, 'duration_secs': 0.011297} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.214582] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1045.214854] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] f8003eee-248c-4005-8465-6e3e96f98f88/f8003eee-248c-4005-8465-6e3e96f98f88.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1045.215125] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2eb70f07-099b-481b-9822-e243147ecede {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.222462] env[62627]: DEBUG oslo_vmware.api [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Waiting for the task: (returnval){ [ 1045.222462] env[62627]: value = "task-2194804" [ 1045.222462] env[62627]: _type = "Task" [ 1045.222462] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.231095] env[62627]: DEBUG oslo_vmware.api [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Task: {'id': task-2194804, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.307925] env[62627]: INFO nova.compute.manager [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] [instance: 16ac82e3-b9e3-4c34-bb8c-2989cc33246f] Took 15.05 seconds to build instance. 
[ 1045.351063] env[62627]: DEBUG nova.scheduler.client.report [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1045.533472] env[62627]: DEBUG nova.compute.manager [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e263159d-e9ff-4a19-a29a-cb06eaf2d679] Start spawning the instance on the hypervisor. {{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1045.568194] env[62627]: DEBUG oslo_concurrency.lockutils [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Acquiring lock "e5a1fa80-6579-4334-9705-33a95dbc3797" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1045.568526] env[62627]: DEBUG oslo_concurrency.lockutils [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Lock "e5a1fa80-6579-4334-9705-33a95dbc3797" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1045.571816] env[62627]: DEBUG nova.virt.hardware [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1045.572124] env[62627]: DEBUG nova.virt.hardware [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1045.572323] env[62627]: DEBUG nova.virt.hardware [None req-52572737-81ea-40df-90fa-33ed60661748 
tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1045.572609] env[62627]: DEBUG nova.virt.hardware [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1045.574356] env[62627]: DEBUG nova.virt.hardware [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1045.574356] env[62627]: DEBUG nova.virt.hardware [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1045.574356] env[62627]: DEBUG nova.virt.hardware [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1045.574356] env[62627]: DEBUG nova.virt.hardware [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1045.574356] env[62627]: DEBUG nova.virt.hardware [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1045.574356] env[62627]: DEBUG nova.virt.hardware [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1045.574356] env[62627]: DEBUG nova.virt.hardware [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1045.576433] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7906791a-2b5e-46b0-8f84-542afe267c67 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.589311] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b0cfc57-d891-4cb7-8e00-70cfc1b1f39a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.731950] env[62627]: DEBUG oslo_vmware.api [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Task: {'id': 
task-2194804, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.456289} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.732322] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] f8003eee-248c-4005-8465-6e3e96f98f88/f8003eee-248c-4005-8465-6e3e96f98f88.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1045.732597] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] [instance: f8003eee-248c-4005-8465-6e3e96f98f88] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1045.732889] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cf84db74-7205-4977-a04a-a7a1ef86d2e1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.739307] env[62627]: DEBUG oslo_vmware.api [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Waiting for the task: (returnval){ [ 1045.739307] env[62627]: value = "task-2194805" [ 1045.739307] env[62627]: _type = "Task" [ 1045.739307] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.748863] env[62627]: DEBUG oslo_vmware.api [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Task: {'id': task-2194805, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.810113] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3c6ffc6e-569b-4693-bf7f-a3fc811b2ec9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Lock "16ac82e3-b9e3-4c34-bb8c-2989cc33246f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.561s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1045.829614] env[62627]: DEBUG oslo_concurrency.lockutils [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Acquiring lock "61f54696-1ce2-4515-ab9c-72347dbd9de0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1045.829614] env[62627]: DEBUG oslo_concurrency.lockutils [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Lock "61f54696-1ce2-4515-ab9c-72347dbd9de0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1045.839189] env[62627]: DEBUG nova.compute.manager [req-dbcea4f3-f0f6-420d-9fab-6312833d2fdb req-945796cf-bd56-4e31-88ac-7a337488051d service nova] [instance: e263159d-e9ff-4a19-a29a-cb06eaf2d679] Received event network-vif-plugged-08a5a113-46c4-4d22-b644-b503cfb32646 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1045.839394] env[62627]: DEBUG oslo_concurrency.lockutils [req-dbcea4f3-f0f6-420d-9fab-6312833d2fdb req-945796cf-bd56-4e31-88ac-7a337488051d service nova] Acquiring lock "e263159d-e9ff-4a19-a29a-cb06eaf2d679-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1045.839589] env[62627]: DEBUG oslo_concurrency.lockutils [req-dbcea4f3-f0f6-420d-9fab-6312833d2fdb req-945796cf-bd56-4e31-88ac-7a337488051d service nova] Lock "e263159d-e9ff-4a19-a29a-cb06eaf2d679-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1045.839749] env[62627]: DEBUG oslo_concurrency.lockutils [req-dbcea4f3-f0f6-420d-9fab-6312833d2fdb req-945796cf-bd56-4e31-88ac-7a337488051d service nova] Lock "e263159d-e9ff-4a19-a29a-cb06eaf2d679-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1045.839911] env[62627]: DEBUG nova.compute.manager [req-dbcea4f3-f0f6-420d-9fab-6312833d2fdb req-945796cf-bd56-4e31-88ac-7a337488051d service nova] [instance: e263159d-e9ff-4a19-a29a-cb06eaf2d679] No waiting events found dispatching network-vif-plugged-08a5a113-46c4-4d22-b644-b503cfb32646 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1045.840077] env[62627]: WARNING nova.compute.manager 
[req-dbcea4f3-f0f6-420d-9fab-6312833d2fdb req-945796cf-bd56-4e31-88ac-7a337488051d service nova] [instance: e263159d-e9ff-4a19-a29a-cb06eaf2d679] Received unexpected event network-vif-plugged-08a5a113-46c4-4d22-b644-b503cfb32646 for instance with vm_state building and task_state spawning. [ 1045.856129] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.350s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1045.856591] env[62627]: DEBUG nova.compute.manager [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] [instance: 513c755a-424a-4e47-887e-7b315ad2e583] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1045.859099] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0a3604f4-6f17-4d5c-bc87-997035436c17 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.224s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1045.859314] env[62627]: DEBUG nova.objects.instance [None req-0a3604f4-6f17-4d5c-bc87-997035436c17 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Lazy-loading 'resources' on Instance uuid e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1045.938136] env[62627]: DEBUG nova.network.neutron [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e263159d-e9ff-4a19-a29a-cb06eaf2d679] Successfully updated port: 08a5a113-46c4-4d22-b644-b503cfb32646 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1046.081790] env[62627]: DEBUG nova.compute.manager [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: e5a1fa80-6579-4334-9705-33a95dbc3797] Starting instance... 
{{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1046.092446] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Acquiring lock "5edf4b5b-93b1-4efc-80ec-f3a7009d5449" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1046.092945] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Lock "5edf4b5b-93b1-4efc-80ec-f3a7009d5449" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1046.169741] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8c1f4bcb-f9da-4eaf-a217-0537023ff0c9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Acquiring lock "16ac82e3-b9e3-4c34-bb8c-2989cc33246f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1046.172264] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8c1f4bcb-f9da-4eaf-a217-0537023ff0c9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Lock "16ac82e3-b9e3-4c34-bb8c-2989cc33246f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1046.172264] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8c1f4bcb-f9da-4eaf-a217-0537023ff0c9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Acquiring lock "16ac82e3-b9e3-4c34-bb8c-2989cc33246f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1046.172264] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8c1f4bcb-f9da-4eaf-a217-0537023ff0c9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Lock "16ac82e3-b9e3-4c34-bb8c-2989cc33246f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1046.172264] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8c1f4bcb-f9da-4eaf-a217-0537023ff0c9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Lock "16ac82e3-b9e3-4c34-bb8c-2989cc33246f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1046.173437] env[62627]: INFO nova.compute.manager [None req-8c1f4bcb-f9da-4eaf-a217-0537023ff0c9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] [instance: 
16ac82e3-b9e3-4c34-bb8c-2989cc33246f] Terminating instance [ 1046.248952] env[62627]: DEBUG oslo_vmware.api [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Task: {'id': task-2194805, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.060949} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.250024] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] [instance: f8003eee-248c-4005-8465-6e3e96f98f88] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1046.250206] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5b1af82-ddf2-4c90-b7b6-127c4bc26653 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.270999] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] [instance: f8003eee-248c-4005-8465-6e3e96f98f88] Reconfiguring VM instance instance-00000049 to attach disk [datastore2] f8003eee-248c-4005-8465-6e3e96f98f88/f8003eee-248c-4005-8465-6e3e96f98f88.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1046.271254] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9228fe08-f5bb-4608-9875-40679376cabf {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.292356] env[62627]: DEBUG oslo_vmware.api [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Waiting for the task: (returnval){ [ 1046.292356] env[62627]: value = "task-2194806" [ 1046.292356] env[62627]: _type = "Task" [ 1046.292356] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.300345] env[62627]: DEBUG oslo_vmware.api [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Task: {'id': task-2194806, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.331382] env[62627]: DEBUG nova.compute.manager [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 61f54696-1ce2-4515-ab9c-72347dbd9de0] Starting instance... 
{{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1046.362478] env[62627]: DEBUG nova.compute.utils [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1046.367655] env[62627]: DEBUG nova.compute.manager [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] [instance: 513c755a-424a-4e47-887e-7b315ad2e583] Allocating IP information in the background. {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1046.367847] env[62627]: DEBUG nova.network.neutron [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] [instance: 513c755a-424a-4e47-887e-7b315ad2e583] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1046.414340] env[62627]: DEBUG nova.policy [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '315edb528a24402ba5b86c3b5d32de46', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd5206f46e638467bb689425ae674683a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 1046.441071] env[62627]: DEBUG oslo_concurrency.lockutils [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Acquiring lock "refresh_cache-e263159d-e9ff-4a19-a29a-cb06eaf2d679" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1046.441966] env[62627]: DEBUG oslo_concurrency.lockutils [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Acquired lock "refresh_cache-e263159d-e9ff-4a19-a29a-cb06eaf2d679" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1046.441966] env[62627]: DEBUG nova.network.neutron [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e263159d-e9ff-4a19-a29a-cb06eaf2d679] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1046.595292] env[62627]: DEBUG nova.compute.manager [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 5edf4b5b-93b1-4efc-80ec-f3a7009d5449] Starting instance... 
{{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1046.606763] env[62627]: DEBUG oslo_concurrency.lockutils [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1046.615544] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9927d3d3-f781-4079-8ca3-3c73d4e2d624 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.627847] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de8bee62-9946-43ae-9fbd-f344157da47e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.661061] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4452e7a-f179-43ff-9def-65c9b6fb5b9b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.672125] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d50253b-6cb9-40a6-b89e-946037ccd804 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.680372] env[62627]: DEBUG nova.compute.manager [None req-8c1f4bcb-f9da-4eaf-a217-0537023ff0c9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] [instance: 16ac82e3-b9e3-4c34-bb8c-2989cc33246f] Start destroying the instance on the hypervisor. 
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1046.680372] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8c1f4bcb-f9da-4eaf-a217-0537023ff0c9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] [instance: 16ac82e3-b9e3-4c34-bb8c-2989cc33246f] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1046.687434] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd37a375-ebc8-4f18-8767-4c62e1e6ab1c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.690827] env[62627]: DEBUG nova.compute.provider_tree [None req-0a3604f4-6f17-4d5c-bc87-997035436c17 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Updating inventory in ProviderTree for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1046.696893] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c1f4bcb-f9da-4eaf-a217-0537023ff0c9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] [instance: 16ac82e3-b9e3-4c34-bb8c-2989cc33246f] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1046.697420] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8efcccc1-f985-4be0-9763-fbd6a8b3f1d3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.703319] env[62627]: DEBUG oslo_vmware.api [None req-8c1f4bcb-f9da-4eaf-a217-0537023ff0c9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Waiting for the task: (returnval){ [ 1046.703319] env[62627]: value = "task-2194807" [ 1046.703319] env[62627]: _type = "Task" [ 1046.703319] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.711652] env[62627]: DEBUG oslo_vmware.api [None req-8c1f4bcb-f9da-4eaf-a217-0537023ff0c9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Task: {'id': task-2194807, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.780367] env[62627]: DEBUG nova.network.neutron [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] [instance: 513c755a-424a-4e47-887e-7b315ad2e583] Successfully created port: 861a18bd-3c64-44b6-9e6b-a78d9449b584 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1046.802774] env[62627]: DEBUG oslo_vmware.api [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Task: {'id': task-2194806, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.850737] env[62627]: DEBUG oslo_concurrency.lockutils [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1046.868552] env[62627]: DEBUG nova.compute.manager [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] [instance: 513c755a-424a-4e47-887e-7b315ad2e583] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1047.008206] env[62627]: DEBUG nova.network.neutron [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e263159d-e9ff-4a19-a29a-cb06eaf2d679] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1047.115899] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1047.213860] env[62627]: ERROR nova.scheduler.client.report [None req-0a3604f4-6f17-4d5c-bc87-997035436c17 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [req-c6340717-5de8-49ac-a0d1-b5598f693445] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c17e7f32-68d8-4e2f-8979-15d701de02f7. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-c6340717-5de8-49ac-a0d1-b5598f693445"}]} [ 1047.220551] env[62627]: DEBUG oslo_vmware.api [None req-8c1f4bcb-f9da-4eaf-a217-0537023ff0c9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Task: {'id': task-2194807, 'name': PowerOffVM_Task, 'duration_secs': 0.329172} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.220823] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c1f4bcb-f9da-4eaf-a217-0537023ff0c9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] [instance: 16ac82e3-b9e3-4c34-bb8c-2989cc33246f] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1047.220997] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8c1f4bcb-f9da-4eaf-a217-0537023ff0c9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] [instance: 16ac82e3-b9e3-4c34-bb8c-2989cc33246f] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1047.221264] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1b86445f-ba9d-4325-a59f-08ab5d7c0d79 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.235597] env[62627]: DEBUG nova.scheduler.client.report [None req-0a3604f4-6f17-4d5c-bc87-997035436c17 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Refreshing inventories for resource provider c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1047.250880] env[62627]: DEBUG nova.scheduler.client.report [None req-0a3604f4-6f17-4d5c-bc87-997035436c17 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Updating ProviderTree inventory for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1047.251139] env[62627]: DEBUG nova.compute.provider_tree [None req-0a3604f4-6f17-4d5c-bc87-997035436c17 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Updating inventory in ProviderTree for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1047.263913] env[62627]: DEBUG nova.scheduler.client.report [None req-0a3604f4-6f17-4d5c-bc87-997035436c17 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Refreshing aggregate associations for resource provider c17e7f32-68d8-4e2f-8979-15d701de02f7, aggregates: None {{(pid=62627) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1047.289361] env[62627]: DEBUG nova.scheduler.client.report [None req-0a3604f4-6f17-4d5c-bc87-997035436c17 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Refreshing trait associations for resource 
provider c17e7f32-68d8-4e2f-8979-15d701de02f7, traits: COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62627) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1047.298536] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8c1f4bcb-f9da-4eaf-a217-0537023ff0c9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] [instance: 16ac82e3-b9e3-4c34-bb8c-2989cc33246f] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1047.298757] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8c1f4bcb-f9da-4eaf-a217-0537023ff0c9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] [instance: 16ac82e3-b9e3-4c34-bb8c-2989cc33246f] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1047.298938] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c1f4bcb-f9da-4eaf-a217-0537023ff0c9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Deleting the datastore file [datastore2] 16ac82e3-b9e3-4c34-bb8c-2989cc33246f {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1047.299989] env[62627]: DEBUG nova.network.neutron [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e263159d-e9ff-4a19-a29a-cb06eaf2d679] Updating instance_info_cache with network_info: [{"id": "08a5a113-46c4-4d22-b644-b503cfb32646", "address": "fa:16:3e:f8:9c:ee", "network": {"id": "cc0a3a8a-1222-4362-a164-11bcef0a8bc1", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1517256335-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "01b0f3fe63f64e1183acc0fec83bb3f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52f465cb-7418-4172-bd7d-aec00abeb692", "external-id": "nsx-vlan-transportzone-895", "segmentation_id": 895, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap08a5a113-46", "ovs_interfaceid": "08a5a113-46c4-4d22-b644-b503cfb32646", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1047.305409] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1123c884-8ea6-4a05-9b44-752b2c8d6563 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.318467] env[62627]: DEBUG oslo_vmware.api [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Task: {'id': task-2194806, 'name': ReconfigVM_Task, 'duration_secs': 0.778298} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.319320] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] [instance: f8003eee-248c-4005-8465-6e3e96f98f88] Reconfigured VM instance instance-00000049 to attach disk [datastore2] f8003eee-248c-4005-8465-6e3e96f98f88/f8003eee-248c-4005-8465-6e3e96f98f88.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1047.320048] env[62627]: DEBUG oslo_vmware.api [None req-8c1f4bcb-f9da-4eaf-a217-0537023ff0c9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Waiting for the task: (returnval){ [ 1047.320048] env[62627]: value = "task-2194809" [ 1047.320048] env[62627]: _type = "Task" [ 1047.320048] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.320268] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ca508204-3b82-483f-83d8-ac32a963c8bf {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.332465] env[62627]: DEBUG oslo_vmware.api [None req-8c1f4bcb-f9da-4eaf-a217-0537023ff0c9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Task: {'id': task-2194809, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.332763] env[62627]: DEBUG oslo_vmware.api [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Waiting for the task: (returnval){ [ 1047.332763] env[62627]: value = "task-2194810" [ 1047.332763] env[62627]: _type = "Task" [ 1047.332763] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.344182] env[62627]: DEBUG oslo_vmware.api [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Task: {'id': task-2194810, 'name': Rename_Task} progress is 10%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.531993] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9734807f-3e58-402a-92ad-8952acca64da {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.539616] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ca93e3d-71ab-4c03-a42d-bdaf991c64d7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.571235] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93531b7f-633f-4333-aa1e-0b6ad2d4acdc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.578524] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bad7dd1e-19c2-4a4c-b968-333c47c855dc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.591448] env[62627]: DEBUG nova.compute.provider_tree [None req-0a3604f4-6f17-4d5c-bc87-997035436c17 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Updating inventory in ProviderTree for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1047.809176] env[62627]: DEBUG oslo_concurrency.lockutils [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Releasing lock "refresh_cache-e263159d-e9ff-4a19-a29a-cb06eaf2d679" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1047.809475] env[62627]: DEBUG nova.compute.manager [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e263159d-e9ff-4a19-a29a-cb06eaf2d679] Instance network_info: |[{"id": "08a5a113-46c4-4d22-b644-b503cfb32646", "address": "fa:16:3e:f8:9c:ee", "network": {"id": "cc0a3a8a-1222-4362-a164-11bcef0a8bc1", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1517256335-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "01b0f3fe63f64e1183acc0fec83bb3f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52f465cb-7418-4172-bd7d-aec00abeb692", "external-id": "nsx-vlan-transportzone-895", "segmentation_id": 895, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap08a5a113-46", "ovs_interfaceid": "08a5a113-46c4-4d22-b644-b503cfb32646", 
"qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1047.809896] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e263159d-e9ff-4a19-a29a-cb06eaf2d679] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f8:9c:ee', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '52f465cb-7418-4172-bd7d-aec00abeb692', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '08a5a113-46c4-4d22-b644-b503cfb32646', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1047.817494] env[62627]: DEBUG oslo.service.loopingcall [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1047.817726] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e263159d-e9ff-4a19-a29a-cb06eaf2d679] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1047.818016] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-138341a5-8701-4c28-a493-ce7cf79b5f4c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.840870] env[62627]: DEBUG oslo_vmware.api [None req-8c1f4bcb-f9da-4eaf-a217-0537023ff0c9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Task: {'id': task-2194809, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.221947} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.842323] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c1f4bcb-f9da-4eaf-a217-0537023ff0c9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1047.842756] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8c1f4bcb-f9da-4eaf-a217-0537023ff0c9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] [instance: 16ac82e3-b9e3-4c34-bb8c-2989cc33246f] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1047.842756] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8c1f4bcb-f9da-4eaf-a217-0537023ff0c9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] [instance: 16ac82e3-b9e3-4c34-bb8c-2989cc33246f] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1047.843124] env[62627]: INFO nova.compute.manager [None req-8c1f4bcb-f9da-4eaf-a217-0537023ff0c9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] [instance: 16ac82e3-b9e3-4c34-bb8c-2989cc33246f] Took 1.16 seconds to destroy the instance on the hypervisor. 
[ 1047.843124] env[62627]: DEBUG oslo.service.loopingcall [None req-8c1f4bcb-f9da-4eaf-a217-0537023ff0c9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1047.843282] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1047.843282] env[62627]: value = "task-2194811" [ 1047.843282] env[62627]: _type = "Task" [ 1047.843282] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.843500] env[62627]: DEBUG nova.compute.manager [-] [instance: 16ac82e3-b9e3-4c34-bb8c-2989cc33246f] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1047.843676] env[62627]: DEBUG nova.network.neutron [-] [instance: 16ac82e3-b9e3-4c34-bb8c-2989cc33246f] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1047.850365] env[62627]: DEBUG oslo_vmware.api [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Task: {'id': task-2194810, 'name': Rename_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.855874] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194811, 'name': CreateVM_Task} progress is 6%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.865411] env[62627]: DEBUG nova.compute.manager [req-1b8cd2a8-80c5-4650-8210-b7117dd96ee7 req-e8cd6fcf-77d0-4cfe-a095-77f349537746 service nova] [instance: e263159d-e9ff-4a19-a29a-cb06eaf2d679] Received event network-changed-08a5a113-46c4-4d22-b644-b503cfb32646 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1047.865489] env[62627]: DEBUG nova.compute.manager [req-1b8cd2a8-80c5-4650-8210-b7117dd96ee7 req-e8cd6fcf-77d0-4cfe-a095-77f349537746 service nova] [instance: e263159d-e9ff-4a19-a29a-cb06eaf2d679] Refreshing instance network info cache due to event network-changed-08a5a113-46c4-4d22-b644-b503cfb32646. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1047.866463] env[62627]: DEBUG oslo_concurrency.lockutils [req-1b8cd2a8-80c5-4650-8210-b7117dd96ee7 req-e8cd6fcf-77d0-4cfe-a095-77f349537746 service nova] Acquiring lock "refresh_cache-e263159d-e9ff-4a19-a29a-cb06eaf2d679" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1047.866463] env[62627]: DEBUG oslo_concurrency.lockutils [req-1b8cd2a8-80c5-4650-8210-b7117dd96ee7 req-e8cd6fcf-77d0-4cfe-a095-77f349537746 service nova] Acquired lock "refresh_cache-e263159d-e9ff-4a19-a29a-cb06eaf2d679" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1047.866463] env[62627]: DEBUG nova.network.neutron [req-1b8cd2a8-80c5-4650-8210-b7117dd96ee7 req-e8cd6fcf-77d0-4cfe-a095-77f349537746 service nova] [instance: e263159d-e9ff-4a19-a29a-cb06eaf2d679] Refreshing network info cache for port 08a5a113-46c4-4d22-b644-b503cfb32646 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1047.879859] env[62627]: DEBUG nova.compute.manager [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] [instance: 513c755a-424a-4e47-887e-7b315ad2e583] Start spawning the instance on the hypervisor. {{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1047.908698] env[62627]: DEBUG nova.virt.hardware [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1047.908954] env[62627]: DEBUG nova.virt.hardware [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1047.909144] env[62627]: DEBUG nova.virt.hardware [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1047.909335] env[62627]: DEBUG nova.virt.hardware [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1047.909497] env[62627]: DEBUG nova.virt.hardware [None 
req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1047.909647] env[62627]: DEBUG nova.virt.hardware [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1047.909853] env[62627]: DEBUG nova.virt.hardware [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1047.910012] env[62627]: DEBUG nova.virt.hardware [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1047.910208] env[62627]: DEBUG nova.virt.hardware [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1047.910372] env[62627]: DEBUG nova.virt.hardware [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1047.910557] env[62627]: DEBUG nova.virt.hardware [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1047.911439] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35e32a4d-b20c-430e-b920-e5e0c9b7371f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.921095] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07ec4db8-9d76-43f3-b131-f98887e72dfb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.126342] env[62627]: DEBUG nova.scheduler.client.report [None req-0a3604f4-6f17-4d5c-bc87-997035436c17 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Updated inventory for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with generation 94 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 
'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1048.126510] env[62627]: DEBUG nova.compute.provider_tree [None req-0a3604f4-6f17-4d5c-bc87-997035436c17 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Updating resource provider c17e7f32-68d8-4e2f-8979-15d701de02f7 generation from 94 to 95 during operation: update_inventory {{(pid=62627) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1048.126591] env[62627]: DEBUG nova.compute.provider_tree [None req-0a3604f4-6f17-4d5c-bc87-997035436c17 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Updating inventory in ProviderTree for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1048.344705] env[62627]: DEBUG oslo_vmware.api [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Task: {'id': task-2194810, 'name': Rename_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.354673] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194811, 'name': CreateVM_Task, 'duration_secs': 0.343171} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.354843] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e263159d-e9ff-4a19-a29a-cb06eaf2d679] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1048.355534] env[62627]: DEBUG oslo_concurrency.lockutils [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1048.355719] env[62627]: DEBUG oslo_concurrency.lockutils [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1048.356051] env[62627]: DEBUG oslo_concurrency.lockutils [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1048.356304] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a88dbace-438c-4b29-aa8e-a581cd7b4e02 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.360562] env[62627]: DEBUG oslo_vmware.api [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the task: (returnval){ [ 1048.360562] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52bf9bdb-5b1b-0572-b60b-a231a1135582" [ 1048.360562] env[62627]: _type = "Task" [ 1048.360562] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.370296] env[62627]: DEBUG oslo_vmware.api [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52bf9bdb-5b1b-0572-b60b-a231a1135582, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.631497] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0a3604f4-6f17-4d5c-bc87-997035436c17 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.772s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1048.634421] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.380s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1048.634970] env[62627]: DEBUG nova.objects.instance [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lazy-loading 'resources' on Instance uuid 07b08283-cbe5-4c36-9127-c423b259643d {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1048.655428] env[62627]: INFO nova.scheduler.client.report [None req-0a3604f4-6f17-4d5c-bc87-997035436c17 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Deleted allocations for instance e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea [ 1048.664506] env[62627]: DEBUG nova.network.neutron [-] [instance: 16ac82e3-b9e3-4c34-bb8c-2989cc33246f] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1048.717968] env[62627]: DEBUG nova.network.neutron [req-1b8cd2a8-80c5-4650-8210-b7117dd96ee7 req-e8cd6fcf-77d0-4cfe-a095-77f349537746 service nova] [instance: e263159d-e9ff-4a19-a29a-cb06eaf2d679] Updated VIF entry in instance network info cache for port 08a5a113-46c4-4d22-b644-b503cfb32646. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1048.720133] env[62627]: DEBUG nova.network.neutron [req-1b8cd2a8-80c5-4650-8210-b7117dd96ee7 req-e8cd6fcf-77d0-4cfe-a095-77f349537746 service nova] [instance: e263159d-e9ff-4a19-a29a-cb06eaf2d679] Updating instance_info_cache with network_info: [{"id": "08a5a113-46c4-4d22-b644-b503cfb32646", "address": "fa:16:3e:f8:9c:ee", "network": {"id": "cc0a3a8a-1222-4362-a164-11bcef0a8bc1", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1517256335-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "01b0f3fe63f64e1183acc0fec83bb3f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52f465cb-7418-4172-bd7d-aec00abeb692", "external-id": "nsx-vlan-transportzone-895", "segmentation_id": 895, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap08a5a113-46", "ovs_interfaceid": "08a5a113-46c4-4d22-b644-b503cfb32646", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1048.738317] env[62627]: DEBUG nova.network.neutron [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] [instance: 513c755a-424a-4e47-887e-7b315ad2e583] Successfully updated port: 861a18bd-3c64-44b6-9e6b-a78d9449b584 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1048.845168] env[62627]: DEBUG oslo_vmware.api [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Task: {'id': task-2194810, 'name': Rename_Task, 'duration_secs': 1.167404} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.845388] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] [instance: f8003eee-248c-4005-8465-6e3e96f98f88] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1048.845604] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-23612ead-1607-4015-90b3-cafac3de1ca1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.851067] env[62627]: DEBUG oslo_vmware.api [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Waiting for the task: (returnval){ [ 1048.851067] env[62627]: value = "task-2194812" [ 1048.851067] env[62627]: _type = "Task" [ 1048.851067] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.858038] env[62627]: DEBUG oslo_vmware.api [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Task: {'id': task-2194812, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.868585] env[62627]: DEBUG oslo_vmware.api [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52bf9bdb-5b1b-0572-b60b-a231a1135582, 'name': SearchDatastore_Task, 'duration_secs': 0.010052} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.868887] env[62627]: DEBUG oslo_concurrency.lockutils [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1048.869146] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e263159d-e9ff-4a19-a29a-cb06eaf2d679] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1048.869375] env[62627]: DEBUG oslo_concurrency.lockutils [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1048.869572] env[62627]: DEBUG oslo_concurrency.lockutils [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1048.869772] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1048.869998] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ca3d22cd-4706-4638-ae3e-474058b8d005 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.877726] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1048.877910] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None 
req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1048.879014] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd3aad92-6f1f-4a4c-a993-5893512c9540 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.885209] env[62627]: DEBUG oslo_vmware.api [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the task: (returnval){ [ 1048.885209] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52ab7a28-37c6-0fc8-bb25-0b729a94366c" [ 1048.885209] env[62627]: _type = "Task" [ 1048.885209] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.896214] env[62627]: DEBUG oslo_vmware.api [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52ab7a28-37c6-0fc8-bb25-0b729a94366c, 'name': SearchDatastore_Task, 'duration_secs': 0.008392} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.896951] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4a4a0f75-151c-4e08-9561-6529dd67d638 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.901511] env[62627]: DEBUG oslo_vmware.api [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the task: (returnval){ [ 1048.901511] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]528f74f3-41d4-7f94-85a5-ad600d710182" [ 1048.901511] env[62627]: _type = "Task" [ 1048.901511] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.908669] env[62627]: DEBUG oslo_vmware.api [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]528f74f3-41d4-7f94-85a5-ad600d710182, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.137888] env[62627]: DEBUG nova.objects.instance [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lazy-loading 'numa_topology' on Instance uuid 07b08283-cbe5-4c36-9127-c423b259643d {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1049.164568] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0a3604f4-6f17-4d5c-bc87-997035436c17 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Lock "e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.039s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1049.169012] env[62627]: INFO nova.compute.manager [-] [instance: 16ac82e3-b9e3-4c34-bb8c-2989cc33246f] Took 1.33 seconds to deallocate network for instance. [ 1049.220839] env[62627]: DEBUG oslo_concurrency.lockutils [req-1b8cd2a8-80c5-4650-8210-b7117dd96ee7 req-e8cd6fcf-77d0-4cfe-a095-77f349537746 service nova] Releasing lock "refresh_cache-e263159d-e9ff-4a19-a29a-cb06eaf2d679" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1049.243077] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Acquiring lock "refresh_cache-513c755a-424a-4e47-887e-7b315ad2e583" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1049.243313] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Acquired lock "refresh_cache-513c755a-424a-4e47-887e-7b315ad2e583" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1049.243458] env[62627]: DEBUG nova.network.neutron [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] [instance: 513c755a-424a-4e47-887e-7b315ad2e583] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1049.361455] env[62627]: DEBUG oslo_vmware.api [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Task: {'id': task-2194812, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.411598] env[62627]: DEBUG oslo_vmware.api [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]528f74f3-41d4-7f94-85a5-ad600d710182, 'name': SearchDatastore_Task, 'duration_secs': 0.007919} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.411842] env[62627]: DEBUG oslo_concurrency.lockutils [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1049.412128] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] e263159d-e9ff-4a19-a29a-cb06eaf2d679/e263159d-e9ff-4a19-a29a-cb06eaf2d679.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1049.412388] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fedaeab9-833d-43ac-bb9e-480d3fadb325 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.419445] env[62627]: DEBUG oslo_vmware.api [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the task: (returnval){ [ 1049.419445] env[62627]: value = "task-2194813" [ 1049.419445] env[62627]: _type = "Task" [ 1049.419445] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.427071] env[62627]: DEBUG oslo_vmware.api [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194813, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.640246] env[62627]: DEBUG nova.objects.base [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Object Instance<07b08283-cbe5-4c36-9127-c423b259643d> lazy-loaded attributes: resources,numa_topology {{(pid=62627) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1049.675092] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8c1f4bcb-f9da-4eaf-a217-0537023ff0c9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1049.784031] env[62627]: DEBUG nova.network.neutron [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] [instance: 513c755a-424a-4e47-887e-7b315ad2e583] Instance cache missing network info. 
{{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1049.863508] env[62627]: DEBUG oslo_vmware.api [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Task: {'id': task-2194812, 'name': PowerOnVM_Task, 'duration_secs': 0.509759} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.866010] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] [instance: f8003eee-248c-4005-8465-6e3e96f98f88] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1049.866259] env[62627]: INFO nova.compute.manager [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] [instance: f8003eee-248c-4005-8465-6e3e96f98f88] Took 10.21 seconds to spawn the instance on the hypervisor. [ 1049.866445] env[62627]: DEBUG nova.compute.manager [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] [instance: f8003eee-248c-4005-8465-6e3e96f98f88] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1049.867252] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9150fa0-c83c-4e10-8f83-d76a0e63803e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.882663] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a10d1829-d26e-4631-bb3a-2a1a07017355 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.889674] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-623c898f-bb16-4dc0-aa2b-523f6c4d9338 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.894800] env[62627]: DEBUG nova.compute.manager [req-18569b45-0c9b-4a3f-a872-6d2df5740d9d req-ef857572-12ed-4610-8a22-46f6b2eec31f service nova] [instance: 16ac82e3-b9e3-4c34-bb8c-2989cc33246f] Received event network-vif-deleted-3597f9e3-4248-4b24-b7ca-bce13900128a {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1049.895014] env[62627]: DEBUG nova.compute.manager [req-18569b45-0c9b-4a3f-a872-6d2df5740d9d req-ef857572-12ed-4610-8a22-46f6b2eec31f service nova] [instance: 513c755a-424a-4e47-887e-7b315ad2e583] Received event network-vif-plugged-861a18bd-3c64-44b6-9e6b-a78d9449b584 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1049.895213] env[62627]: DEBUG oslo_concurrency.lockutils [req-18569b45-0c9b-4a3f-a872-6d2df5740d9d req-ef857572-12ed-4610-8a22-46f6b2eec31f service nova] Acquiring lock "513c755a-424a-4e47-887e-7b315ad2e583-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1049.895411] env[62627]: DEBUG oslo_concurrency.lockutils [req-18569b45-0c9b-4a3f-a872-6d2df5740d9d 
req-ef857572-12ed-4610-8a22-46f6b2eec31f service nova] Lock "513c755a-424a-4e47-887e-7b315ad2e583-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1049.895574] env[62627]: DEBUG oslo_concurrency.lockutils [req-18569b45-0c9b-4a3f-a872-6d2df5740d9d req-ef857572-12ed-4610-8a22-46f6b2eec31f service nova] Lock "513c755a-424a-4e47-887e-7b315ad2e583-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1049.895776] env[62627]: DEBUG nova.compute.manager [req-18569b45-0c9b-4a3f-a872-6d2df5740d9d req-ef857572-12ed-4610-8a22-46f6b2eec31f service nova] [instance: 513c755a-424a-4e47-887e-7b315ad2e583] No waiting events found dispatching network-vif-plugged-861a18bd-3c64-44b6-9e6b-a78d9449b584 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1049.895953] env[62627]: WARNING nova.compute.manager [req-18569b45-0c9b-4a3f-a872-6d2df5740d9d req-ef857572-12ed-4610-8a22-46f6b2eec31f service nova] [instance: 513c755a-424a-4e47-887e-7b315ad2e583] Received unexpected event network-vif-plugged-861a18bd-3c64-44b6-9e6b-a78d9449b584 for instance with vm_state building and task_state spawning. [ 1049.896128] env[62627]: DEBUG nova.compute.manager [req-18569b45-0c9b-4a3f-a872-6d2df5740d9d req-ef857572-12ed-4610-8a22-46f6b2eec31f service nova] [instance: 513c755a-424a-4e47-887e-7b315ad2e583] Received event network-changed-861a18bd-3c64-44b6-9e6b-a78d9449b584 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1049.896283] env[62627]: DEBUG nova.compute.manager [req-18569b45-0c9b-4a3f-a872-6d2df5740d9d req-ef857572-12ed-4610-8a22-46f6b2eec31f service nova] [instance: 513c755a-424a-4e47-887e-7b315ad2e583] Refreshing instance network info cache due to event network-changed-861a18bd-3c64-44b6-9e6b-a78d9449b584. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1049.896445] env[62627]: DEBUG oslo_concurrency.lockutils [req-18569b45-0c9b-4a3f-a872-6d2df5740d9d req-ef857572-12ed-4610-8a22-46f6b2eec31f service nova] Acquiring lock "refresh_cache-513c755a-424a-4e47-887e-7b315ad2e583" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1049.931380] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5337d06-b921-4c74-bb87-8aa45193a171 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.939534] env[62627]: DEBUG oslo_vmware.api [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194813, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.455143} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.941426] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] e263159d-e9ff-4a19-a29a-cb06eaf2d679/e263159d-e9ff-4a19-a29a-cb06eaf2d679.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1049.941890] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e263159d-e9ff-4a19-a29a-cb06eaf2d679] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1049.941890] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8c0cc978-514c-4b73-93fa-b1bc66314b46 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.944713] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db79912e-09ea-444b-9f7e-4a41c434ee60 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.958630] env[62627]: DEBUG nova.compute.provider_tree [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1049.960913] env[62627]: DEBUG oslo_vmware.api [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the task: (returnval){ [ 1049.960913] env[62627]: value = "task-2194814" [ 1049.960913] env[62627]: _type = "Task" [ 1049.960913] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.968937] env[62627]: DEBUG oslo_vmware.api [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194814, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.971306] env[62627]: DEBUG nova.network.neutron [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] [instance: 513c755a-424a-4e47-887e-7b315ad2e583] Updating instance_info_cache with network_info: [{"id": "861a18bd-3c64-44b6-9e6b-a78d9449b584", "address": "fa:16:3e:66:93:f9", "network": {"id": "df1ea580-9d30-4a58-b115-4e87a51656ae", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1521957101-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d5206f46e638467bb689425ae674683a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "34a581cb-6d33-4e2e-af50-735a6749d6da", "external-id": "nsx-vlan-transportzone-673", "segmentation_id": 673, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap861a18bd-3c", "ovs_interfaceid": "861a18bd-3c64-44b6-9e6b-a78d9449b584", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1050.386766] env[62627]: INFO nova.compute.manager [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] [instance: f8003eee-248c-4005-8465-6e3e96f98f88] Took 18.39 seconds to build instance. 
[ 1050.462419] env[62627]: DEBUG nova.scheduler.client.report [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1050.475679] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Releasing lock "refresh_cache-513c755a-424a-4e47-887e-7b315ad2e583" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1050.475994] env[62627]: DEBUG nova.compute.manager [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] [instance: 513c755a-424a-4e47-887e-7b315ad2e583] Instance network_info: |[{"id": "861a18bd-3c64-44b6-9e6b-a78d9449b584", "address": "fa:16:3e:66:93:f9", "network": {"id": "df1ea580-9d30-4a58-b115-4e87a51656ae", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1521957101-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d5206f46e638467bb689425ae674683a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "34a581cb-6d33-4e2e-af50-735a6749d6da", "external-id": "nsx-vlan-transportzone-673", "segmentation_id": 673, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap861a18bd-3c", "ovs_interfaceid": "861a18bd-3c64-44b6-9e6b-a78d9449b584", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1050.476315] env[62627]: DEBUG oslo_vmware.api [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194814, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06071} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.477122] env[62627]: DEBUG oslo_concurrency.lockutils [req-18569b45-0c9b-4a3f-a872-6d2df5740d9d req-ef857572-12ed-4610-8a22-46f6b2eec31f service nova] Acquired lock "refresh_cache-513c755a-424a-4e47-887e-7b315ad2e583" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1050.477335] env[62627]: DEBUG nova.network.neutron [req-18569b45-0c9b-4a3f-a872-6d2df5740d9d req-ef857572-12ed-4610-8a22-46f6b2eec31f service nova] [instance: 513c755a-424a-4e47-887e-7b315ad2e583] Refreshing network info cache for port 861a18bd-3c64-44b6-9e6b-a78d9449b584 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1050.478306] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] [instance: 513c755a-424a-4e47-887e-7b315ad2e583] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:66:93:f9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '34a581cb-6d33-4e2e-af50-735a6749d6da', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '861a18bd-3c64-44b6-9e6b-a78d9449b584', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1050.485492] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Creating folder: Project (d5206f46e638467bb689425ae674683a). Parent ref: group-v447541. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1050.485714] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e263159d-e9ff-4a19-a29a-cb06eaf2d679] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1050.486557] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a1ca2175-124d-44cb-9194-9db5b8c6c506 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.488645] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d87b702-4c8d-4af8-b51a-20a79b6512a6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.510879] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e263159d-e9ff-4a19-a29a-cb06eaf2d679] Reconfiguring VM instance instance-0000004a to attach disk [datastore2] e263159d-e9ff-4a19-a29a-cb06eaf2d679/e263159d-e9ff-4a19-a29a-cb06eaf2d679.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1050.512279] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f074119e-588c-49ae-84cb-44b859b21bad {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.526628] env[62627]: INFO nova.virt.vmwareapi.vm_util [None 
req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Created folder: Project (d5206f46e638467bb689425ae674683a) in parent group-v447541. [ 1050.526808] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Creating folder: Instances. Parent ref: group-v447744. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1050.528121] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-928970a2-a5b3-4d4f-912d-17493fe418ba {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.536733] env[62627]: DEBUG oslo_vmware.api [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the task: (returnval){ [ 1050.536733] env[62627]: value = "task-2194817" [ 1050.536733] env[62627]: _type = "Task" [ 1050.536733] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.540900] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Created folder: Instances in parent group-v447744. [ 1050.541133] env[62627]: DEBUG oslo.service.loopingcall [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1050.541749] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 513c755a-424a-4e47-887e-7b315ad2e583] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1050.542294] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-16db9709-b4ce-4caf-a540-160f5bbe2383 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.561322] env[62627]: DEBUG oslo_vmware.api [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194817, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.566379] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1050.566379] env[62627]: value = "task-2194818" [ 1050.566379] env[62627]: _type = "Task" [ 1050.566379] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.574593] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194818, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.888908] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6541966b-79dc-4afd-bf69-1d8ea704bbef tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Lock "f8003eee-248c-4005-8465-6e3e96f98f88" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.910s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1050.972096] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.337s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1050.974178] env[62627]: DEBUG oslo_concurrency.lockutils [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.368s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1050.975611] env[62627]: INFO nova.compute.claims [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: e5a1fa80-6579-4334-9705-33a95dbc3797] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1051.032482] env[62627]: DEBUG oslo_concurrency.lockutils [None req-828e97c2-c143-493a-985a-79aa0caa6ca5 tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Acquiring lock "f8003eee-248c-4005-8465-6e3e96f98f88" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1051.032730] env[62627]: DEBUG oslo_concurrency.lockutils [None req-828e97c2-c143-493a-985a-79aa0caa6ca5 tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Lock "f8003eee-248c-4005-8465-6e3e96f98f88" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1051.032960] env[62627]: INFO nova.compute.manager [None req-828e97c2-c143-493a-985a-79aa0caa6ca5 tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] [instance: f8003eee-248c-4005-8465-6e3e96f98f88] Rebooting instance [ 1051.046915] env[62627]: DEBUG oslo_vmware.api [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194817, 'name': ReconfigVM_Task, 'duration_secs': 0.380498} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.047175] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e263159d-e9ff-4a19-a29a-cb06eaf2d679] Reconfigured VM instance instance-0000004a to attach disk [datastore2] e263159d-e9ff-4a19-a29a-cb06eaf2d679/e263159d-e9ff-4a19-a29a-cb06eaf2d679.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1051.047766] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-46481102-c35b-4632-85f7-bdc2584993f1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.055739] env[62627]: DEBUG oslo_vmware.api [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the task: (returnval){ [ 1051.055739] env[62627]: value = "task-2194819" [ 1051.055739] env[62627]: _type = "Task" [ 1051.055739] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.062994] env[62627]: DEBUG oslo_vmware.api [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194819, 'name': Rename_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.073958] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194818, 'name': CreateVM_Task, 'duration_secs': 0.365693} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.074122] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 513c755a-424a-4e47-887e-7b315ad2e583] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1051.074727] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1051.074885] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1051.075210] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1051.075432] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cde6e678-e458-469f-ac51-2ab6666598dc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.079775] env[62627]: DEBUG oslo_vmware.api [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Waiting for the task: (returnval){ [ 1051.079775] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52c59751-0de8-b112-529a-46d6f1d1b77f" [ 1051.079775] env[62627]: _type = "Task" [ 1051.079775] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.087095] env[62627]: DEBUG oslo_vmware.api [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52c59751-0de8-b112-529a-46d6f1d1b77f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.180893] env[62627]: DEBUG nova.network.neutron [req-18569b45-0c9b-4a3f-a872-6d2df5740d9d req-ef857572-12ed-4610-8a22-46f6b2eec31f service nova] [instance: 513c755a-424a-4e47-887e-7b315ad2e583] Updated VIF entry in instance network info cache for port 861a18bd-3c64-44b6-9e6b-a78d9449b584. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1051.181288] env[62627]: DEBUG nova.network.neutron [req-18569b45-0c9b-4a3f-a872-6d2df5740d9d req-ef857572-12ed-4610-8a22-46f6b2eec31f service nova] [instance: 513c755a-424a-4e47-887e-7b315ad2e583] Updating instance_info_cache with network_info: [{"id": "861a18bd-3c64-44b6-9e6b-a78d9449b584", "address": "fa:16:3e:66:93:f9", "network": {"id": "df1ea580-9d30-4a58-b115-4e87a51656ae", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1521957101-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d5206f46e638467bb689425ae674683a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "34a581cb-6d33-4e2e-af50-735a6749d6da", "external-id": "nsx-vlan-transportzone-673", "segmentation_id": 673, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap861a18bd-3c", "ovs_interfaceid": "861a18bd-3c64-44b6-9e6b-a78d9449b584", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1051.483854] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7803a02e-ead2-4f41-8018-fc0fb0f95cc7 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "07b08283-cbe5-4c36-9127-c423b259643d" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 34.227s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1051.484626] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8dd7e758-c587-4b7f-bd35-0248787568bd tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "07b08283-cbe5-4c36-9127-c423b259643d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 12.884s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1051.484838] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8dd7e758-c587-4b7f-bd35-0248787568bd tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquiring lock "07b08283-cbe5-4c36-9127-c423b259643d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1051.485049] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8dd7e758-c587-4b7f-bd35-0248787568bd tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "07b08283-cbe5-4c36-9127-c423b259643d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1051.485965] env[62627]: DEBUG oslo_concurrency.lockutils [None 
req-8dd7e758-c587-4b7f-bd35-0248787568bd tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "07b08283-cbe5-4c36-9127-c423b259643d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1051.487010] env[62627]: INFO nova.compute.manager [None req-8dd7e758-c587-4b7f-bd35-0248787568bd tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Terminating instance [ 1051.549802] env[62627]: DEBUG oslo_concurrency.lockutils [None req-828e97c2-c143-493a-985a-79aa0caa6ca5 tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Acquiring lock "refresh_cache-f8003eee-248c-4005-8465-6e3e96f98f88" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1051.549986] env[62627]: DEBUG oslo_concurrency.lockutils [None req-828e97c2-c143-493a-985a-79aa0caa6ca5 tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Acquired lock "refresh_cache-f8003eee-248c-4005-8465-6e3e96f98f88" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1051.550250] env[62627]: DEBUG nova.network.neutron [None req-828e97c2-c143-493a-985a-79aa0caa6ca5 tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] [instance: f8003eee-248c-4005-8465-6e3e96f98f88] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1051.566253] env[62627]: DEBUG oslo_vmware.api [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194819, 'name': Rename_Task, 'duration_secs': 0.143276} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.566494] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e263159d-e9ff-4a19-a29a-cb06eaf2d679] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1051.566720] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-53937199-d6c5-49ed-b0c0-fef84e9ee6b1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.573284] env[62627]: DEBUG oslo_vmware.api [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the task: (returnval){ [ 1051.573284] env[62627]: value = "task-2194820" [ 1051.573284] env[62627]: _type = "Task" [ 1051.573284] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.580817] env[62627]: DEBUG oslo_vmware.api [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194820, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.588582] env[62627]: DEBUG oslo_vmware.api [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52c59751-0de8-b112-529a-46d6f1d1b77f, 'name': SearchDatastore_Task, 'duration_secs': 0.012189} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.588867] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1051.589217] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] [instance: 513c755a-424a-4e47-887e-7b315ad2e583] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1051.589479] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1051.589628] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1051.589848] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1051.590128] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a858b73c-d9f0-4800-9751-0a352a52b335 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.599593] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1051.599760] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1051.600572] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d240cbb6-cb47-442a-a86c-729921a81f5b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.606381] env[62627]: DEBUG oslo_vmware.api [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Waiting for the task: (returnval){ [ 1051.606381] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5279b12b-932d-d5bc-2762-ccd7915832c4" [ 1051.606381] env[62627]: _type = "Task" [ 1051.606381] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.614821] env[62627]: DEBUG oslo_vmware.api [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5279b12b-932d-d5bc-2762-ccd7915832c4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.684014] env[62627]: DEBUG oslo_concurrency.lockutils [req-18569b45-0c9b-4a3f-a872-6d2df5740d9d req-ef857572-12ed-4610-8a22-46f6b2eec31f service nova] Releasing lock "refresh_cache-513c755a-424a-4e47-887e-7b315ad2e583" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1051.990345] env[62627]: DEBUG nova.compute.manager [None req-8dd7e758-c587-4b7f-bd35-0248787568bd tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Start destroying the instance on the hypervisor. {{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1051.990881] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8dd7e758-c587-4b7f-bd35-0248787568bd tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1051.990931] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c55fedea-48cd-4892-bd1e-40b8ce9a90c2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.000418] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14364a19-de3b-4180-af5f-69526b549a48 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.036095] env[62627]: WARNING nova.virt.vmwareapi.vmops [None req-8dd7e758-c587-4b7f-bd35-0248787568bd tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 07b08283-cbe5-4c36-9127-c423b259643d could not be found. 
[ 1052.036305] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8dd7e758-c587-4b7f-bd35-0248787568bd tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1052.036488] env[62627]: INFO nova.compute.manager [None req-8dd7e758-c587-4b7f-bd35-0248787568bd tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1052.036754] env[62627]: DEBUG oslo.service.loopingcall [None req-8dd7e758-c587-4b7f-bd35-0248787568bd tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1052.039522] env[62627]: DEBUG nova.compute.manager [-] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1052.039631] env[62627]: DEBUG nova.network.neutron [-] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1052.084806] env[62627]: DEBUG oslo_vmware.api [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194820, 'name': PowerOnVM_Task, 'duration_secs': 0.468889} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.085094] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e263159d-e9ff-4a19-a29a-cb06eaf2d679] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1052.085301] env[62627]: INFO nova.compute.manager [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e263159d-e9ff-4a19-a29a-cb06eaf2d679] Took 6.55 seconds to spawn the instance on the hypervisor. [ 1052.085477] env[62627]: DEBUG nova.compute.manager [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e263159d-e9ff-4a19-a29a-cb06eaf2d679] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1052.086426] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38d99eb8-8591-48ea-86ae-8286726789b3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.117515] env[62627]: DEBUG oslo_vmware.api [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5279b12b-932d-d5bc-2762-ccd7915832c4, 'name': SearchDatastore_Task, 'duration_secs': 0.009377} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.117515] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a81f2e6e-dd46-4b86-8bfc-f3b5042502d2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.122188] env[62627]: DEBUG oslo_vmware.api [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Waiting for the task: (returnval){ [ 1052.122188] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52216e87-bd91-6a55-59b7-3da6e8dd66fc" [ 1052.122188] env[62627]: _type = "Task" [ 1052.122188] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.130875] env[62627]: DEBUG oslo_vmware.api [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52216e87-bd91-6a55-59b7-3da6e8dd66fc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.233550] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33839555-2426-4b6f-8ca1-c7df741a52e3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.241492] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd0b33e1-7368-4319-ba58-549dca3466f1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.274750] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50eb5ddf-f105-4958-8000-cabc5522ed76 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.282871] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de84cbea-8052-48a7-95f7-0d439ed10ed5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.298606] env[62627]: DEBUG nova.compute.provider_tree [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1052.413741] env[62627]: DEBUG nova.network.neutron [None req-828e97c2-c143-493a-985a-79aa0caa6ca5 tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] [instance: f8003eee-248c-4005-8465-6e3e96f98f88] Updating instance_info_cache with network_info: [{"id": "66cad317-3512-4850-8957-332a2fffb1c0", "address": "fa:16:3e:e3:dc:02", "network": {"id": "073dffbd-13a9-491a-9a85-f6b0bd5152f5", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1442356389-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 
4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "512907c88fac4d0eb5aa460036ced32b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66cad317-35", "ovs_interfaceid": "66cad317-3512-4850-8957-332a2fffb1c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1052.607887] env[62627]: INFO nova.compute.manager [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e263159d-e9ff-4a19-a29a-cb06eaf2d679] Took 17.94 seconds to build instance. [ 1052.633405] env[62627]: DEBUG oslo_vmware.api [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52216e87-bd91-6a55-59b7-3da6e8dd66fc, 'name': SearchDatastore_Task, 'duration_secs': 0.010053} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.634032] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1052.634032] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore1] 513c755a-424a-4e47-887e-7b315ad2e583/513c755a-424a-4e47-887e-7b315ad2e583.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1052.634265] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-48f32748-04d6-4292-a629-c2f21a6c72b6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.641723] env[62627]: DEBUG oslo_vmware.api [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Waiting for the task: (returnval){ [ 1052.641723] env[62627]: value = "task-2194821" [ 1052.641723] env[62627]: _type = "Task" [ 1052.641723] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.649149] env[62627]: DEBUG oslo_vmware.api [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Task: {'id': task-2194821, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.804795] env[62627]: DEBUG nova.scheduler.client.report [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1052.920309] env[62627]: DEBUG oslo_concurrency.lockutils [None req-828e97c2-c143-493a-985a-79aa0caa6ca5 tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Releasing lock "refresh_cache-f8003eee-248c-4005-8465-6e3e96f98f88" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1052.940179] env[62627]: DEBUG nova.network.neutron [-] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1053.110161] env[62627]: DEBUG oslo_concurrency.lockutils [None req-52572737-81ea-40df-90fa-33ed60661748 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Lock "e263159d-e9ff-4a19-a29a-cb06eaf2d679" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.460s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1053.152968] env[62627]: DEBUG oslo_vmware.api [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Task: {'id': task-2194821, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.468489} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.153241] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore1] 513c755a-424a-4e47-887e-7b315ad2e583/513c755a-424a-4e47-887e-7b315ad2e583.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1053.153445] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] [instance: 513c755a-424a-4e47-887e-7b315ad2e583] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1053.153692] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6c73537a-f2c2-4158-a0e9-7b83bdf072e8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.160282] env[62627]: DEBUG oslo_vmware.api [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Waiting for the task: (returnval){ [ 1053.160282] env[62627]: value = "task-2194822" [ 1053.160282] env[62627]: _type = "Task" [ 1053.160282] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.168199] env[62627]: DEBUG oslo_vmware.api [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Task: {'id': task-2194822, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.310802] env[62627]: DEBUG oslo_concurrency.lockutils [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.337s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1053.311403] env[62627]: DEBUG nova.compute.manager [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: e5a1fa80-6579-4334-9705-33a95dbc3797] Start building networks asynchronously for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1053.314711] env[62627]: DEBUG oslo_concurrency.lockutils [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.464s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1053.316513] env[62627]: INFO nova.compute.claims [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 61f54696-1ce2-4515-ab9c-72347dbd9de0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1053.425032] env[62627]: DEBUG nova.compute.manager [None req-828e97c2-c143-493a-985a-79aa0caa6ca5 tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] [instance: f8003eee-248c-4005-8465-6e3e96f98f88] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1053.425806] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5df0c7e-eefc-445b-b386-5966376ec66d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.443015] env[62627]: INFO nova.compute.manager [-] [instance: 07b08283-cbe5-4c36-9127-c423b259643d] Took 1.40 seconds to deallocate network for instance. [ 1053.670923] env[62627]: DEBUG oslo_vmware.api [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Task: {'id': task-2194822, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.083651} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.670923] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] [instance: 513c755a-424a-4e47-887e-7b315ad2e583] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1053.671676] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fac53c65-e636-4368-b76b-d791ed7349d2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.693791] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] [instance: 513c755a-424a-4e47-887e-7b315ad2e583] Reconfiguring VM instance instance-0000004b to attach disk [datastore1] 513c755a-424a-4e47-887e-7b315ad2e583/513c755a-424a-4e47-887e-7b315ad2e583.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1053.694064] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b1b5f2a1-cf6b-457a-bebf-428cadd8eb7c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.713031] env[62627]: DEBUG oslo_vmware.api [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Waiting for the task: (returnval){ [ 1053.713031] env[62627]: value = "task-2194823" [ 1053.713031] env[62627]: _type = "Task" [ 1053.713031] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.720671] env[62627]: DEBUG oslo_vmware.api [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Task: {'id': task-2194823, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.820825] env[62627]: DEBUG nova.compute.utils [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1053.822757] env[62627]: DEBUG nova.compute.manager [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: e5a1fa80-6579-4334-9705-33a95dbc3797] Allocating IP information in the background. 
{{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1053.822757] env[62627]: DEBUG nova.network.neutron [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: e5a1fa80-6579-4334-9705-33a95dbc3797] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1053.863683] env[62627]: DEBUG nova.policy [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '939b4ef10c614698b1d9232c01d56905', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd6324a68b7914b08847262f7773460cf', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 1054.118314] env[62627]: DEBUG nova.compute.manager [None req-cc0630d4-bebb-4218-9fa6-67d8991c4140 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e263159d-e9ff-4a19-a29a-cb06eaf2d679] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1054.119286] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ea935cb-d91e-42ad-a243-b1c49a46b5a9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.168379] env[62627]: DEBUG nova.network.neutron [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: e5a1fa80-6579-4334-9705-33a95dbc3797] Successfully created port: 6e993a7b-e7ba-4beb-ac82-cd2f2b111371 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1054.226654] env[62627]: DEBUG oslo_vmware.api [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Task: {'id': task-2194823, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.327499] env[62627]: DEBUG nova.compute.manager [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: e5a1fa80-6579-4334-9705-33a95dbc3797] Start building block device mappings for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1054.441026] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9a665b9-f7a5-4381-9ac7-1844337cf748 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.451774] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-828e97c2-c143-493a-985a-79aa0caa6ca5 tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] [instance: f8003eee-248c-4005-8465-6e3e96f98f88] Doing hard reboot of VM {{(pid=62627) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1054.452102] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-460e3871-7907-4ccf-91a7-088287297998 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.460625] env[62627]: DEBUG oslo_vmware.api [None req-828e97c2-c143-493a-985a-79aa0caa6ca5 tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Waiting for the task: (returnval){ [ 1054.460625] env[62627]: value = "task-2194824" [ 1054.460625] env[62627]: _type = "Task" [ 1054.460625] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.468946] env[62627]: DEBUG oslo_vmware.api [None req-828e97c2-c143-493a-985a-79aa0caa6ca5 tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Task: {'id': task-2194824, 'name': ResetVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.477121] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8dd7e758-c587-4b7f-bd35-0248787568bd tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "07b08283-cbe5-4c36-9127-c423b259643d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.992s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1054.576795] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7840d307-7cf5-47f8-b9c3-1725212add08 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.586418] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c3618fc-0812-417a-9356-9e93ced8c473 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.618024] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f189076a-8395-404a-9432-cb493e8aa6d5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.625090] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b96f197-44c5-47e9-b4bb-2b2450e4248a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.631224] env[62627]: INFO nova.compute.manager [None req-cc0630d4-bebb-4218-9fa6-67d8991c4140 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: 
e263159d-e9ff-4a19-a29a-cb06eaf2d679] instance snapshotting [ 1054.640840] env[62627]: DEBUG nova.compute.provider_tree [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1054.643027] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a987666-e43a-490f-8b50-6d0bc5ef0ef9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.662814] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-366e27b2-5d18-497f-87cb-ed7923a6b53a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.723192] env[62627]: DEBUG oslo_vmware.api [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Task: {'id': task-2194823, 'name': ReconfigVM_Task, 'duration_secs': 0.890482} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.723412] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] [instance: 513c755a-424a-4e47-887e-7b315ad2e583] Reconfigured VM instance instance-0000004b to attach disk [datastore1] 513c755a-424a-4e47-887e-7b315ad2e583/513c755a-424a-4e47-887e-7b315ad2e583.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1054.724065] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2f69c063-1ee1-4035-b0e8-bc12950f6cac {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.730058] env[62627]: DEBUG oslo_vmware.api [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Waiting for the task: (returnval){ [ 1054.730058] env[62627]: value = "task-2194825" [ 1054.730058] env[62627]: _type = "Task" [ 1054.730058] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.738731] env[62627]: DEBUG oslo_vmware.api [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Task: {'id': task-2194825, 'name': Rename_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.972310] env[62627]: DEBUG oslo_vmware.api [None req-828e97c2-c143-493a-985a-79aa0caa6ca5 tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Task: {'id': task-2194824, 'name': ResetVM_Task, 'duration_secs': 0.105621} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.972623] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-828e97c2-c143-493a-985a-79aa0caa6ca5 tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] [instance: f8003eee-248c-4005-8465-6e3e96f98f88] Did hard reboot of VM {{(pid=62627) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1054.972865] env[62627]: DEBUG nova.compute.manager [None req-828e97c2-c143-493a-985a-79aa0caa6ca5 tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] [instance: f8003eee-248c-4005-8465-6e3e96f98f88] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1054.973661] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13754b69-b1a9-457f-968d-f60b5df468b6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.147504] env[62627]: DEBUG nova.scheduler.client.report [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1055.173219] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-cc0630d4-bebb-4218-9fa6-67d8991c4140 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e263159d-e9ff-4a19-a29a-cb06eaf2d679] Creating Snapshot of the VM instance {{(pid=62627) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1055.173511] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-13a73b02-0afb-4a32-a766-831e5209e8aa {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.181168] env[62627]: DEBUG oslo_vmware.api [None req-cc0630d4-bebb-4218-9fa6-67d8991c4140 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the task: (returnval){ [ 1055.181168] env[62627]: value = "task-2194826" [ 1055.181168] env[62627]: _type = "Task" [ 1055.181168] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.188643] env[62627]: DEBUG oslo_vmware.api [None req-cc0630d4-bebb-4218-9fa6-67d8991c4140 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194826, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.239401] env[62627]: DEBUG oslo_vmware.api [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Task: {'id': task-2194825, 'name': Rename_Task, 'duration_secs': 0.195029} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.239658] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] [instance: 513c755a-424a-4e47-887e-7b315ad2e583] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1055.239891] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-561d5a17-c53a-4de0-8874-d363ef465a72 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.245977] env[62627]: DEBUG oslo_vmware.api [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Waiting for the task: (returnval){ [ 1055.245977] env[62627]: value = "task-2194827" [ 1055.245977] env[62627]: _type = "Task" [ 1055.245977] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.254939] env[62627]: DEBUG oslo_vmware.api [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Task: {'id': task-2194827, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.339036] env[62627]: DEBUG nova.compute.manager [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: e5a1fa80-6579-4334-9705-33a95dbc3797] Start spawning the instance on the hypervisor. 
{{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1055.362765] env[62627]: DEBUG nova.virt.hardware [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1055.362922] env[62627]: DEBUG nova.virt.hardware [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1055.363031] env[62627]: DEBUG nova.virt.hardware [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1055.363264] env[62627]: DEBUG nova.virt.hardware [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1055.363382] env[62627]: DEBUG nova.virt.hardware [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1055.363550] env[62627]: DEBUG nova.virt.hardware [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1055.363785] env[62627]: DEBUG nova.virt.hardware [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1055.364015] env[62627]: DEBUG nova.virt.hardware [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} 
[ 1055.364202] env[62627]: DEBUG nova.virt.hardware [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1055.364366] env[62627]: DEBUG nova.virt.hardware [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1055.364536] env[62627]: DEBUG nova.virt.hardware [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1055.365410] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6cebcf4-4e84-49f1-9447-6dba71021dec {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.373610] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0da288b1-7d08-4722-b626-efa39452d1ba {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.487580] env[62627]: DEBUG oslo_concurrency.lockutils [None req-828e97c2-c143-493a-985a-79aa0caa6ca5 tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Lock "f8003eee-248c-4005-8465-6e3e96f98f88" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.454s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1055.590416] env[62627]: DEBUG nova.compute.manager [req-fef10b5e-e417-4259-a310-bc152a932864 req-febd22fd-2afa-4e5f-90b6-44316a2e9f58 service nova] [instance: e5a1fa80-6579-4334-9705-33a95dbc3797] Received event network-vif-plugged-6e993a7b-e7ba-4beb-ac82-cd2f2b111371 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1055.590416] env[62627]: DEBUG oslo_concurrency.lockutils [req-fef10b5e-e417-4259-a310-bc152a932864 req-febd22fd-2afa-4e5f-90b6-44316a2e9f58 service nova] Acquiring lock "e5a1fa80-6579-4334-9705-33a95dbc3797-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1055.590416] env[62627]: DEBUG oslo_concurrency.lockutils [req-fef10b5e-e417-4259-a310-bc152a932864 req-febd22fd-2afa-4e5f-90b6-44316a2e9f58 service nova] Lock "e5a1fa80-6579-4334-9705-33a95dbc3797-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1055.590416] env[62627]: DEBUG oslo_concurrency.lockutils [req-fef10b5e-e417-4259-a310-bc152a932864 req-febd22fd-2afa-4e5f-90b6-44316a2e9f58 service nova] Lock "e5a1fa80-6579-4334-9705-33a95dbc3797-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1055.590416] env[62627]: DEBUG nova.compute.manager [req-fef10b5e-e417-4259-a310-bc152a932864 req-febd22fd-2afa-4e5f-90b6-44316a2e9f58 service nova] [instance: e5a1fa80-6579-4334-9705-33a95dbc3797] No waiting events found dispatching network-vif-plugged-6e993a7b-e7ba-4beb-ac82-cd2f2b111371 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1055.590416] env[62627]: WARNING nova.compute.manager [req-fef10b5e-e417-4259-a310-bc152a932864 req-febd22fd-2afa-4e5f-90b6-44316a2e9f58 service nova] [instance: e5a1fa80-6579-4334-9705-33a95dbc3797] Received unexpected event network-vif-plugged-6e993a7b-e7ba-4beb-ac82-cd2f2b111371 for instance with vm_state building and task_state spawning. [ 1055.656138] env[62627]: DEBUG oslo_concurrency.lockutils [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.339s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1055.656138] env[62627]: DEBUG nova.compute.manager [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 61f54696-1ce2-4515-ab9c-72347dbd9de0] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1055.659710] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.545s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1055.661305] env[62627]: INFO nova.compute.claims [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 5edf4b5b-93b1-4efc-80ec-f3a7009d5449] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1055.693985] env[62627]: DEBUG oslo_vmware.api [None req-cc0630d4-bebb-4218-9fa6-67d8991c4140 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194826, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.720037] env[62627]: DEBUG nova.network.neutron [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: e5a1fa80-6579-4334-9705-33a95dbc3797] Successfully updated port: 6e993a7b-e7ba-4beb-ac82-cd2f2b111371 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1055.756923] env[62627]: DEBUG oslo_vmware.api [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Task: {'id': task-2194827, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.939987] env[62627]: DEBUG oslo_concurrency.lockutils [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquiring lock "3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1055.940230] env[62627]: DEBUG oslo_concurrency.lockutils [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1055.959835] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ac005499-9a90-4f39-a221-040b29afc0d4 tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Acquiring lock "f8003eee-248c-4005-8465-6e3e96f98f88" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1055.960083] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ac005499-9a90-4f39-a221-040b29afc0d4 tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Lock "f8003eee-248c-4005-8465-6e3e96f98f88" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1055.960296] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ac005499-9a90-4f39-a221-040b29afc0d4 tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Acquiring lock "f8003eee-248c-4005-8465-6e3e96f98f88-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1055.960809] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ac005499-9a90-4f39-a221-040b29afc0d4 tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Lock "f8003eee-248c-4005-8465-6e3e96f98f88-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1055.961026] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ac005499-9a90-4f39-a221-040b29afc0d4 tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Lock "f8003eee-248c-4005-8465-6e3e96f98f88-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1055.962885] env[62627]: INFO nova.compute.manager [None req-ac005499-9a90-4f39-a221-040b29afc0d4 tempest-InstanceActionsTestJSON-675212662 
tempest-InstanceActionsTestJSON-675212662-project-member] [instance: f8003eee-248c-4005-8465-6e3e96f98f88] Terminating instance [ 1056.161387] env[62627]: DEBUG nova.compute.utils [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1056.164081] env[62627]: DEBUG nova.compute.manager [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 61f54696-1ce2-4515-ab9c-72347dbd9de0] Allocating IP information in the background. {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1056.164081] env[62627]: DEBUG nova.network.neutron [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 61f54696-1ce2-4515-ab9c-72347dbd9de0] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1056.194330] env[62627]: DEBUG oslo_vmware.api [None req-cc0630d4-bebb-4218-9fa6-67d8991c4140 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194826, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.223409] env[62627]: DEBUG oslo_concurrency.lockutils [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Acquiring lock "refresh_cache-e5a1fa80-6579-4334-9705-33a95dbc3797" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1056.223632] env[62627]: DEBUG oslo_concurrency.lockutils [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Acquired lock "refresh_cache-e5a1fa80-6579-4334-9705-33a95dbc3797" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1056.223852] env[62627]: DEBUG nova.network.neutron [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: e5a1fa80-6579-4334-9705-33a95dbc3797] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1056.258506] env[62627]: DEBUG oslo_vmware.api [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Task: {'id': task-2194827, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.435281] env[62627]: DEBUG nova.policy [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '939b4ef10c614698b1d9232c01d56905', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd6324a68b7914b08847262f7773460cf', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 1056.442434] env[62627]: DEBUG nova.compute.manager [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1056.466941] env[62627]: DEBUG nova.compute.manager [None req-ac005499-9a90-4f39-a221-040b29afc0d4 tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] [instance: f8003eee-248c-4005-8465-6e3e96f98f88] Start destroying the instance on the hypervisor. {{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1056.467187] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ac005499-9a90-4f39-a221-040b29afc0d4 tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] [instance: f8003eee-248c-4005-8465-6e3e96f98f88] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1056.468094] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-508e54f5-3b6c-4938-81b7-70a7c9ba4544 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.476859] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac005499-9a90-4f39-a221-040b29afc0d4 tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] [instance: f8003eee-248c-4005-8465-6e3e96f98f88] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1056.477159] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4ae2efa3-25a8-4e95-941c-dfcafaf4a94b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.484367] env[62627]: DEBUG oslo_vmware.api [None req-ac005499-9a90-4f39-a221-040b29afc0d4 tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Waiting for the task: (returnval){ [ 1056.484367] env[62627]: value = "task-2194828" [ 1056.484367] env[62627]: _type = "Task" [ 1056.484367] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.495972] env[62627]: DEBUG oslo_vmware.api [None req-ac005499-9a90-4f39-a221-040b29afc0d4 tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Task: {'id': task-2194828, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.667516] env[62627]: DEBUG nova.compute.manager [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 61f54696-1ce2-4515-ab9c-72347dbd9de0] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1056.693524] env[62627]: DEBUG oslo_vmware.api [None req-cc0630d4-bebb-4218-9fa6-67d8991c4140 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194826, 'name': CreateSnapshot_Task, 'duration_secs': 1.078733} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.693838] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-cc0630d4-bebb-4218-9fa6-67d8991c4140 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e263159d-e9ff-4a19-a29a-cb06eaf2d679] Created Snapshot of the VM instance {{(pid=62627) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1056.694834] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a034109-c562-4517-9f12-9b1a4e589de1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.758786] env[62627]: DEBUG oslo_vmware.api [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Task: {'id': task-2194827, 'name': PowerOnVM_Task} progress is 87%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.786324] env[62627]: DEBUG nova.network.neutron [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: e5a1fa80-6579-4334-9705-33a95dbc3797] Instance cache missing network info. 
{{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1056.789283] env[62627]: DEBUG nova.network.neutron [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 61f54696-1ce2-4515-ab9c-72347dbd9de0] Successfully created port: 1a7a45f5-84e1-4b69-add9-e5b1fe8fd92f {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1056.957018] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-109939cb-e2fe-43bf-91f4-5d2b637e9945 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.962956] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c824903-e7ee-4dca-a1bb-0fafe8406e87 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.972017] env[62627]: DEBUG oslo_concurrency.lockutils [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1057.004661] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35ed1fa2-5d24-4262-9ed0-305a65433fd7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.015234] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f560335-0f32-4032-ae56-871b54bc0888 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.020561] env[62627]: DEBUG oslo_vmware.api [None req-ac005499-9a90-4f39-a221-040b29afc0d4 tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Task: {'id': task-2194828, 'name': PowerOffVM_Task, 'duration_secs': 0.17194} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.021203] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac005499-9a90-4f39-a221-040b29afc0d4 tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] [instance: f8003eee-248c-4005-8465-6e3e96f98f88] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1057.021436] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ac005499-9a90-4f39-a221-040b29afc0d4 tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] [instance: f8003eee-248c-4005-8465-6e3e96f98f88] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1057.022049] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-28a03ea9-a270-4b24-9be9-ed172fbf1eed {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.036059] env[62627]: DEBUG nova.compute.provider_tree [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1057.071853] env[62627]: DEBUG nova.network.neutron [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: e5a1fa80-6579-4334-9705-33a95dbc3797] Updating instance_info_cache with network_info: [{"id": "6e993a7b-e7ba-4beb-ac82-cd2f2b111371", "address": "fa:16:3e:05:08:7e", "network": {"id": "d8124016-8788-4799-b1a6-b95e841f2a03", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-914036947-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d6324a68b7914b08847262f7773460cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c29724c-5452-441a-8060-5bf89d1f5847", "external-id": "nsx-vlan-transportzone-683", "segmentation_id": 683, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e993a7b-e7", "ovs_interfaceid": "6e993a7b-e7ba-4beb-ac82-cd2f2b111371", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1057.100007] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ac005499-9a90-4f39-a221-040b29afc0d4 tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] [instance: f8003eee-248c-4005-8465-6e3e96f98f88] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1057.100238] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ac005499-9a90-4f39-a221-040b29afc0d4 
tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] [instance: f8003eee-248c-4005-8465-6e3e96f98f88] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1057.100422] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac005499-9a90-4f39-a221-040b29afc0d4 tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Deleting the datastore file [datastore2] f8003eee-248c-4005-8465-6e3e96f98f88 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1057.100679] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-92bbf447-f489-4066-a5f3-1c93af87fec0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.107502] env[62627]: DEBUG oslo_vmware.api [None req-ac005499-9a90-4f39-a221-040b29afc0d4 tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Waiting for the task: (returnval){ [ 1057.107502] env[62627]: value = "task-2194830" [ 1057.107502] env[62627]: _type = "Task" [ 1057.107502] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.114629] env[62627]: DEBUG oslo_vmware.api [None req-ac005499-9a90-4f39-a221-040b29afc0d4 tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Task: {'id': task-2194830, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.215629] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-cc0630d4-bebb-4218-9fa6-67d8991c4140 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e263159d-e9ff-4a19-a29a-cb06eaf2d679] Creating linked-clone VM from snapshot {{(pid=62627) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1057.216660] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-ad113f1c-6658-44c3-909a-72a5cff99601 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.226367] env[62627]: DEBUG oslo_vmware.api [None req-cc0630d4-bebb-4218-9fa6-67d8991c4140 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the task: (returnval){ [ 1057.226367] env[62627]: value = "task-2194831" [ 1057.226367] env[62627]: _type = "Task" [ 1057.226367] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.234671] env[62627]: DEBUG oslo_vmware.api [None req-cc0630d4-bebb-4218-9fa6-67d8991c4140 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194831, 'name': CloneVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.257430] env[62627]: DEBUG oslo_vmware.api [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Task: {'id': task-2194827, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.539409] env[62627]: DEBUG nova.scheduler.client.report [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1057.556938] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquiring lock "e71794b9-22eb-4643-8587-f12a198024a4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1057.557223] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "e71794b9-22eb-4643-8587-f12a198024a4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1057.574754] env[62627]: DEBUG oslo_concurrency.lockutils [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Releasing lock "refresh_cache-e5a1fa80-6579-4334-9705-33a95dbc3797" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1057.575082] env[62627]: DEBUG nova.compute.manager [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: e5a1fa80-6579-4334-9705-33a95dbc3797] Instance network_info: |[{"id": "6e993a7b-e7ba-4beb-ac82-cd2f2b111371", "address": "fa:16:3e:05:08:7e", "network": {"id": "d8124016-8788-4799-b1a6-b95e841f2a03", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-914036947-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d6324a68b7914b08847262f7773460cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c29724c-5452-441a-8060-5bf89d1f5847", "external-id": "nsx-vlan-transportzone-683", "segmentation_id": 683, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e993a7b-e7", "ovs_interfaceid": "6e993a7b-e7ba-4beb-ac82-cd2f2b111371", "qbh_params": null, "qbg_params": null, "active": 
true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1057.575678] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: e5a1fa80-6579-4334-9705-33a95dbc3797] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:05:08:7e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3c29724c-5452-441a-8060-5bf89d1f5847', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6e993a7b-e7ba-4beb-ac82-cd2f2b111371', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1057.583090] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Creating folder: Project (d6324a68b7914b08847262f7773460cf). Parent ref: group-v447541. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1057.583588] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8d796acf-359d-47a4-b0cc-67c3ec532093 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.594550] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Created folder: Project (d6324a68b7914b08847262f7773460cf) in parent group-v447541. [ 1057.594736] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Creating folder: Instances. Parent ref: group-v447749. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1057.594961] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1f64e0cf-751c-4ee3-8f5a-9dd387590f93 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.606475] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Created folder: Instances in parent group-v447749. [ 1057.606725] env[62627]: DEBUG oslo.service.loopingcall [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1057.606945] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e5a1fa80-6579-4334-9705-33a95dbc3797] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1057.607174] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-956cc88c-2e18-45d3-a0b5-49dde141ee7c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.623059] env[62627]: DEBUG nova.compute.manager [req-c3bfbcae-8a9b-4550-b375-bd8b0599cf08 req-bc9ddf66-af7b-4b45-a352-14987f0db1ac service nova] [instance: e5a1fa80-6579-4334-9705-33a95dbc3797] Received event network-changed-6e993a7b-e7ba-4beb-ac82-cd2f2b111371 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1057.623253] env[62627]: DEBUG nova.compute.manager [req-c3bfbcae-8a9b-4550-b375-bd8b0599cf08 req-bc9ddf66-af7b-4b45-a352-14987f0db1ac service nova] [instance: e5a1fa80-6579-4334-9705-33a95dbc3797] Refreshing instance network info cache due to event network-changed-6e993a7b-e7ba-4beb-ac82-cd2f2b111371. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1057.623456] env[62627]: DEBUG oslo_concurrency.lockutils [req-c3bfbcae-8a9b-4550-b375-bd8b0599cf08 req-bc9ddf66-af7b-4b45-a352-14987f0db1ac service nova] Acquiring lock "refresh_cache-e5a1fa80-6579-4334-9705-33a95dbc3797" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1057.623595] env[62627]: DEBUG oslo_concurrency.lockutils [req-c3bfbcae-8a9b-4550-b375-bd8b0599cf08 req-bc9ddf66-af7b-4b45-a352-14987f0db1ac service nova] Acquired lock "refresh_cache-e5a1fa80-6579-4334-9705-33a95dbc3797" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1057.623760] env[62627]: DEBUG nova.network.neutron [req-c3bfbcae-8a9b-4550-b375-bd8b0599cf08 req-bc9ddf66-af7b-4b45-a352-14987f0db1ac service nova] [instance: e5a1fa80-6579-4334-9705-33a95dbc3797] Refreshing network info cache for port 6e993a7b-e7ba-4beb-ac82-cd2f2b111371 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1057.633100] env[62627]: DEBUG oslo_vmware.api [None req-ac005499-9a90-4f39-a221-040b29afc0d4 tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Task: {'id': task-2194830, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.130548} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.634706] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac005499-9a90-4f39-a221-040b29afc0d4 tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1057.634896] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ac005499-9a90-4f39-a221-040b29afc0d4 tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] [instance: f8003eee-248c-4005-8465-6e3e96f98f88] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1057.635097] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ac005499-9a90-4f39-a221-040b29afc0d4 tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] [instance: f8003eee-248c-4005-8465-6e3e96f98f88] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1057.635262] env[62627]: INFO nova.compute.manager [None req-ac005499-9a90-4f39-a221-040b29afc0d4 tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] [instance: f8003eee-248c-4005-8465-6e3e96f98f88] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1057.635490] env[62627]: DEBUG oslo.service.loopingcall [None req-ac005499-9a90-4f39-a221-040b29afc0d4 tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1057.635668] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1057.635668] env[62627]: value = "task-2194834" [ 1057.635668] env[62627]: _type = "Task" [ 1057.635668] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.636146] env[62627]: DEBUG nova.compute.manager [-] [instance: f8003eee-248c-4005-8465-6e3e96f98f88] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1057.636260] env[62627]: DEBUG nova.network.neutron [-] [instance: f8003eee-248c-4005-8465-6e3e96f98f88] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1057.646277] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194834, 'name': CreateVM_Task} progress is 6%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.676639] env[62627]: DEBUG nova.compute.manager [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 61f54696-1ce2-4515-ab9c-72347dbd9de0] Start spawning the instance on the hypervisor. 
{{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1057.705445] env[62627]: DEBUG nova.virt.hardware [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1057.705716] env[62627]: DEBUG nova.virt.hardware [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1057.705920] env[62627]: DEBUG nova.virt.hardware [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1057.706210] env[62627]: DEBUG nova.virt.hardware [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1057.706416] env[62627]: DEBUG nova.virt.hardware [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1057.706637] env[62627]: DEBUG nova.virt.hardware [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1057.706928] env[62627]: DEBUG nova.virt.hardware [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1057.707168] env[62627]: DEBUG nova.virt.hardware [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} 
[ 1057.707414] env[62627]: DEBUG nova.virt.hardware [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1057.707652] env[62627]: DEBUG nova.virt.hardware [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1057.707875] env[62627]: DEBUG nova.virt.hardware [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1057.709280] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30c960e8-c125-4d1f-af63-5b0d50f4c965 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.718389] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd4a7a34-ab6c-4f0d-91f4-7b8d94d94d9b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.742032] env[62627]: DEBUG oslo_vmware.api [None req-cc0630d4-bebb-4218-9fa6-67d8991c4140 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194831, 'name': CloneVM_Task} progress is 94%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.758573] env[62627]: DEBUG oslo_vmware.api [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Task: {'id': task-2194827, 'name': PowerOnVM_Task, 'duration_secs': 2.369713} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.758870] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] [instance: 513c755a-424a-4e47-887e-7b315ad2e583] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1057.759093] env[62627]: INFO nova.compute.manager [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] [instance: 513c755a-424a-4e47-887e-7b315ad2e583] Took 9.88 seconds to spawn the instance on the hypervisor. 
[ 1057.759273] env[62627]: DEBUG nova.compute.manager [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] [instance: 513c755a-424a-4e47-887e-7b315ad2e583] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1057.760037] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-491ae5cc-2ec8-461e-9842-a67e20bc5607 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.045661] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.385s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1058.045661] env[62627]: DEBUG nova.compute.manager [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 5edf4b5b-93b1-4efc-80ec-f3a7009d5449] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1058.049486] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8c1f4bcb-f9da-4eaf-a217-0537023ff0c9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.375s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1058.049701] env[62627]: DEBUG nova.objects.instance [None req-8c1f4bcb-f9da-4eaf-a217-0537023ff0c9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Lazy-loading 'resources' on Instance uuid 16ac82e3-b9e3-4c34-bb8c-2989cc33246f {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1058.060496] env[62627]: DEBUG nova.compute.manager [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1058.148596] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194834, 'name': CreateVM_Task, 'duration_secs': 0.340231} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.148816] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e5a1fa80-6579-4334-9705-33a95dbc3797] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1058.149542] env[62627]: DEBUG oslo_concurrency.lockutils [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1058.149758] env[62627]: DEBUG oslo_concurrency.lockutils [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1058.150108] env[62627]: DEBUG oslo_concurrency.lockutils [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1058.150392] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-49d6ae7a-5ca9-4657-b092-c08eb71d5160 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.154814] env[62627]: DEBUG oslo_vmware.api [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Waiting for the task: (returnval){ [ 1058.154814] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d0ce99-c419-5c3f-9484-4e32af71f5b7" [ 1058.154814] env[62627]: _type = "Task" [ 1058.154814] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.162493] env[62627]: DEBUG oslo_vmware.api [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d0ce99-c419-5c3f-9484-4e32af71f5b7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.244561] env[62627]: DEBUG oslo_vmware.api [None req-cc0630d4-bebb-4218-9fa6-67d8991c4140 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194831, 'name': CloneVM_Task} progress is 94%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.279382] env[62627]: INFO nova.compute.manager [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] [instance: 513c755a-424a-4e47-887e-7b315ad2e583] Took 21.71 seconds to build instance. 
[ 1058.355630] env[62627]: DEBUG nova.network.neutron [-] [instance: f8003eee-248c-4005-8465-6e3e96f98f88] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1058.442036] env[62627]: DEBUG nova.network.neutron [req-c3bfbcae-8a9b-4550-b375-bd8b0599cf08 req-bc9ddf66-af7b-4b45-a352-14987f0db1ac service nova] [instance: e5a1fa80-6579-4334-9705-33a95dbc3797] Updated VIF entry in instance network info cache for port 6e993a7b-e7ba-4beb-ac82-cd2f2b111371. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1058.442406] env[62627]: DEBUG nova.network.neutron [req-c3bfbcae-8a9b-4550-b375-bd8b0599cf08 req-bc9ddf66-af7b-4b45-a352-14987f0db1ac service nova] [instance: e5a1fa80-6579-4334-9705-33a95dbc3797] Updating instance_info_cache with network_info: [{"id": "6e993a7b-e7ba-4beb-ac82-cd2f2b111371", "address": "fa:16:3e:05:08:7e", "network": {"id": "d8124016-8788-4799-b1a6-b95e841f2a03", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-914036947-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d6324a68b7914b08847262f7773460cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c29724c-5452-441a-8060-5bf89d1f5847", "external-id": "nsx-vlan-transportzone-683", "segmentation_id": 683, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e993a7b-e7", "ovs_interfaceid": "6e993a7b-e7ba-4beb-ac82-cd2f2b111371", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1058.554300] env[62627]: DEBUG nova.compute.utils [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1058.562090] env[62627]: DEBUG nova.compute.manager [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 5edf4b5b-93b1-4efc-80ec-f3a7009d5449] Allocating IP information in the background. 
{{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1058.562090] env[62627]: DEBUG nova.network.neutron [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 5edf4b5b-93b1-4efc-80ec-f3a7009d5449] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1058.586688] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1058.607804] env[62627]: DEBUG nova.network.neutron [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 61f54696-1ce2-4515-ab9c-72347dbd9de0] Successfully updated port: 1a7a45f5-84e1-4b69-add9-e5b1fe8fd92f {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1058.626421] env[62627]: DEBUG nova.policy [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '939b4ef10c614698b1d9232c01d56905', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd6324a68b7914b08847262f7773460cf', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 1058.677405] env[62627]: DEBUG oslo_vmware.api [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d0ce99-c419-5c3f-9484-4e32af71f5b7, 'name': SearchDatastore_Task, 'duration_secs': 0.013015} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.678319] env[62627]: DEBUG oslo_concurrency.lockutils [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1058.678581] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: e5a1fa80-6579-4334-9705-33a95dbc3797] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1058.678797] env[62627]: DEBUG oslo_concurrency.lockutils [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1058.678950] env[62627]: DEBUG oslo_concurrency.lockutils [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1058.679183] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1058.683159] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2883c4eb-ef3a-4846-b97d-3136fb2f7775 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.692201] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1058.692982] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1058.693599] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-41bcfcba-17bd-438d-9ddf-274da2f0fd0a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.705050] env[62627]: DEBUG oslo_vmware.api [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Waiting for the task: (returnval){ [ 1058.705050] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]527a00a6-a82c-b5be-5e4e-fe49851053a7" [ 1058.705050] env[62627]: _type = "Task" [ 1058.705050] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.718027] env[62627]: DEBUG nova.compute.manager [req-c4b5c18e-e7b7-4630-932f-a87ca373816e req-1aef0462-fd6e-4dc9-978c-c9e22eb92067 service nova] [instance: 513c755a-424a-4e47-887e-7b315ad2e583] Received event network-changed-861a18bd-3c64-44b6-9e6b-a78d9449b584 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1058.718153] env[62627]: DEBUG nova.compute.manager [req-c4b5c18e-e7b7-4630-932f-a87ca373816e req-1aef0462-fd6e-4dc9-978c-c9e22eb92067 service nova] [instance: 513c755a-424a-4e47-887e-7b315ad2e583] Refreshing instance network info cache due to event network-changed-861a18bd-3c64-44b6-9e6b-a78d9449b584. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1058.718286] env[62627]: DEBUG oslo_concurrency.lockutils [req-c4b5c18e-e7b7-4630-932f-a87ca373816e req-1aef0462-fd6e-4dc9-978c-c9e22eb92067 service nova] Acquiring lock "refresh_cache-513c755a-424a-4e47-887e-7b315ad2e583" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1058.718956] env[62627]: DEBUG oslo_concurrency.lockutils [req-c4b5c18e-e7b7-4630-932f-a87ca373816e req-1aef0462-fd6e-4dc9-978c-c9e22eb92067 service nova] Acquired lock "refresh_cache-513c755a-424a-4e47-887e-7b315ad2e583" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1058.718956] env[62627]: DEBUG nova.network.neutron [req-c4b5c18e-e7b7-4630-932f-a87ca373816e req-1aef0462-fd6e-4dc9-978c-c9e22eb92067 service nova] [instance: 513c755a-424a-4e47-887e-7b315ad2e583] Refreshing network info cache for port 861a18bd-3c64-44b6-9e6b-a78d9449b584 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1058.720326] env[62627]: DEBUG oslo_vmware.api [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]527a00a6-a82c-b5be-5e4e-fe49851053a7, 'name': SearchDatastore_Task, 'duration_secs': 0.00945} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.724302] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-20331185-288d-42b9-aaf0-a9583b4da0a0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.730678] env[62627]: DEBUG oslo_vmware.api [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Waiting for the task: (returnval){ [ 1058.730678] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d98012-19c3-fa17-1de3-f39a940801c5" [ 1058.730678] env[62627]: _type = "Task" [ 1058.730678] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.747430] env[62627]: DEBUG oslo_vmware.api [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d98012-19c3-fa17-1de3-f39a940801c5, 'name': SearchDatastore_Task, 'duration_secs': 0.008818} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.748380] env[62627]: DEBUG oslo_concurrency.lockutils [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1058.748380] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore1] e5a1fa80-6579-4334-9705-33a95dbc3797/e5a1fa80-6579-4334-9705-33a95dbc3797.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1058.748647] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a721f4a1-1b64-4dfe-af99-ea3c5e4f35c3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.757317] env[62627]: DEBUG oslo_vmware.api [None req-cc0630d4-bebb-4218-9fa6-67d8991c4140 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194831, 'name': CloneVM_Task} progress is 100%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.762016] env[62627]: DEBUG oslo_vmware.api [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Waiting for the task: (returnval){ [ 1058.762016] env[62627]: value = "task-2194835" [ 1058.762016] env[62627]: _type = "Task" [ 1058.762016] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.770491] env[62627]: DEBUG oslo_vmware.api [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': task-2194835, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.781608] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c11fef4d-97b6-4f48-a1d5-0e1baf361d61 tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Lock "513c755a-424a-4e47-887e-7b315ad2e583" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.240s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1058.860178] env[62627]: INFO nova.compute.manager [-] [instance: f8003eee-248c-4005-8465-6e3e96f98f88] Took 1.22 seconds to deallocate network for instance. [ 1058.870121] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da148071-958d-45f0-b34c-2aa069e22594 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.877946] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d00a1638-d61f-4f95-ac98-33be9cba08c8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.914415] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-440dc2d8-2931-45ad-8e56-a3194133ba99 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.923750] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41add451-36f3-4b4e-a9c8-ef5f79ed8ccf {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.928288] env[62627]: DEBUG nova.network.neutron [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 5edf4b5b-93b1-4efc-80ec-f3a7009d5449] Successfully created port: 7a1d4f75-d881-4698-995b-8545c20494e7 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1058.942499] env[62627]: DEBUG nova.compute.provider_tree [None req-8c1f4bcb-f9da-4eaf-a217-0537023ff0c9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Updating inventory in ProviderTree for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1058.944913] env[62627]: DEBUG oslo_concurrency.lockutils [req-c3bfbcae-8a9b-4550-b375-bd8b0599cf08 req-bc9ddf66-af7b-4b45-a352-14987f0db1ac service nova] Releasing lock 
"refresh_cache-e5a1fa80-6579-4334-9705-33a95dbc3797" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1059.061690] env[62627]: DEBUG nova.compute.manager [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 5edf4b5b-93b1-4efc-80ec-f3a7009d5449] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1059.113898] env[62627]: DEBUG oslo_concurrency.lockutils [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Acquiring lock "refresh_cache-61f54696-1ce2-4515-ab9c-72347dbd9de0" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1059.114060] env[62627]: DEBUG oslo_concurrency.lockutils [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Acquired lock "refresh_cache-61f54696-1ce2-4515-ab9c-72347dbd9de0" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1059.114108] env[62627]: DEBUG nova.network.neutron [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 61f54696-1ce2-4515-ab9c-72347dbd9de0] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1059.249575] env[62627]: DEBUG oslo_vmware.api [None req-cc0630d4-bebb-4218-9fa6-67d8991c4140 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194831, 'name': CloneVM_Task, 'duration_secs': 1.60724} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.249960] env[62627]: INFO nova.virt.vmwareapi.vmops [None req-cc0630d4-bebb-4218-9fa6-67d8991c4140 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e263159d-e9ff-4a19-a29a-cb06eaf2d679] Created linked-clone VM from snapshot [ 1059.250782] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53d2b82b-54a0-4e0e-971e-dbde181ca96d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.261024] env[62627]: DEBUG nova.virt.vmwareapi.images [None req-cc0630d4-bebb-4218-9fa6-67d8991c4140 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e263159d-e9ff-4a19-a29a-cb06eaf2d679] Uploading image 710b1144-d228-420e-89f3-944450e31d8e {{(pid=62627) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1059.276548] env[62627]: DEBUG oslo_vmware.api [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': task-2194835, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.282248] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc0630d4-bebb-4218-9fa6-67d8991c4140 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e263159d-e9ff-4a19-a29a-cb06eaf2d679] Destroying the VM {{(pid=62627) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1059.282555] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-2cd29d99-e093-42c8-8987-2a70bdc226eb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.288814] env[62627]: DEBUG oslo_vmware.api [None req-cc0630d4-bebb-4218-9fa6-67d8991c4140 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the task: (returnval){ [ 1059.288814] env[62627]: value = "task-2194836" [ 1059.288814] env[62627]: _type = "Task" [ 1059.288814] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.299166] env[62627]: DEBUG oslo_vmware.api [None req-cc0630d4-bebb-4218-9fa6-67d8991c4140 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194836, 'name': Destroy_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.367905] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ac005499-9a90-4f39-a221-040b29afc0d4 tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1059.464646] env[62627]: ERROR nova.scheduler.client.report [None req-8c1f4bcb-f9da-4eaf-a217-0537023ff0c9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] [req-d3bd5065-4edf-41ec-ba81-6c072cbed525] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c17e7f32-68d8-4e2f-8979-15d701de02f7. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-d3bd5065-4edf-41ec-ba81-6c072cbed525"}]} [ 1059.480426] env[62627]: DEBUG nova.scheduler.client.report [None req-8c1f4bcb-f9da-4eaf-a217-0537023ff0c9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Refreshing inventories for resource provider c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1059.489415] env[62627]: DEBUG nova.network.neutron [req-c4b5c18e-e7b7-4630-932f-a87ca373816e req-1aef0462-fd6e-4dc9-978c-c9e22eb92067 service nova] [instance: 513c755a-424a-4e47-887e-7b315ad2e583] Updated VIF entry in instance network info cache for port 861a18bd-3c64-44b6-9e6b-a78d9449b584. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1059.489740] env[62627]: DEBUG nova.network.neutron [req-c4b5c18e-e7b7-4630-932f-a87ca373816e req-1aef0462-fd6e-4dc9-978c-c9e22eb92067 service nova] [instance: 513c755a-424a-4e47-887e-7b315ad2e583] Updating instance_info_cache with network_info: [{"id": "861a18bd-3c64-44b6-9e6b-a78d9449b584", "address": "fa:16:3e:66:93:f9", "network": {"id": "df1ea580-9d30-4a58-b115-4e87a51656ae", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1521957101-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.203", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d5206f46e638467bb689425ae674683a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "34a581cb-6d33-4e2e-af50-735a6749d6da", "external-id": "nsx-vlan-transportzone-673", "segmentation_id": 673, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap861a18bd-3c", "ovs_interfaceid": "861a18bd-3c64-44b6-9e6b-a78d9449b584", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1059.494562] env[62627]: DEBUG nova.scheduler.client.report [None req-8c1f4bcb-f9da-4eaf-a217-0537023ff0c9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Updating ProviderTree inventory for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1059.494763] env[62627]: DEBUG nova.compute.provider_tree [None req-8c1f4bcb-f9da-4eaf-a217-0537023ff0c9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Updating inventory in ProviderTree for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1059.505246] env[62627]: DEBUG nova.scheduler.client.report [None req-8c1f4bcb-f9da-4eaf-a217-0537023ff0c9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Refreshing aggregate associations for resource provider c17e7f32-68d8-4e2f-8979-15d701de02f7, aggregates: None {{(pid=62627) _refresh_associations 
/opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1059.521623] env[62627]: DEBUG nova.scheduler.client.report [None req-8c1f4bcb-f9da-4eaf-a217-0537023ff0c9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Refreshing trait associations for resource provider c17e7f32-68d8-4e2f-8979-15d701de02f7, traits: COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62627) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1059.648732] env[62627]: DEBUG nova.network.neutron [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 61f54696-1ce2-4515-ab9c-72347dbd9de0] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1059.712175] env[62627]: DEBUG nova.compute.manager [req-20f1f72a-99c6-4bec-9a01-15118864c052 req-5f05914e-7cda-49fc-9685-b38d757a5097 service nova] [instance: f8003eee-248c-4005-8465-6e3e96f98f88] Received event network-vif-deleted-66cad317-3512-4850-8957-332a2fffb1c0 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1059.712175] env[62627]: DEBUG nova.compute.manager [req-20f1f72a-99c6-4bec-9a01-15118864c052 req-5f05914e-7cda-49fc-9685-b38d757a5097 service nova] [instance: 61f54696-1ce2-4515-ab9c-72347dbd9de0] Received event network-vif-plugged-1a7a45f5-84e1-4b69-add9-e5b1fe8fd92f {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1059.712175] env[62627]: DEBUG oslo_concurrency.lockutils [req-20f1f72a-99c6-4bec-9a01-15118864c052 req-5f05914e-7cda-49fc-9685-b38d757a5097 service nova] Acquiring lock "61f54696-1ce2-4515-ab9c-72347dbd9de0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1059.712175] env[62627]: DEBUG oslo_concurrency.lockutils [req-20f1f72a-99c6-4bec-9a01-15118864c052 req-5f05914e-7cda-49fc-9685-b38d757a5097 service nova] Lock "61f54696-1ce2-4515-ab9c-72347dbd9de0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1059.712175] env[62627]: DEBUG oslo_concurrency.lockutils [req-20f1f72a-99c6-4bec-9a01-15118864c052 req-5f05914e-7cda-49fc-9685-b38d757a5097 service nova] Lock "61f54696-1ce2-4515-ab9c-72347dbd9de0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1059.712175] env[62627]: DEBUG nova.compute.manager [req-20f1f72a-99c6-4bec-9a01-15118864c052 req-5f05914e-7cda-49fc-9685-b38d757a5097 service nova] [instance: 61f54696-1ce2-4515-ab9c-72347dbd9de0] No waiting events found dispatching network-vif-plugged-1a7a45f5-84e1-4b69-add9-e5b1fe8fd92f {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1059.712175] env[62627]: WARNING nova.compute.manager [req-20f1f72a-99c6-4bec-9a01-15118864c052 req-5f05914e-7cda-49fc-9685-b38d757a5097 service nova] [instance: 61f54696-1ce2-4515-ab9c-72347dbd9de0] Received unexpected event network-vif-plugged-1a7a45f5-84e1-4b69-add9-e5b1fe8fd92f 
for instance with vm_state building and task_state spawning. [ 1059.713106] env[62627]: DEBUG nova.compute.manager [req-20f1f72a-99c6-4bec-9a01-15118864c052 req-5f05914e-7cda-49fc-9685-b38d757a5097 service nova] [instance: 61f54696-1ce2-4515-ab9c-72347dbd9de0] Received event network-changed-1a7a45f5-84e1-4b69-add9-e5b1fe8fd92f {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1059.713549] env[62627]: DEBUG nova.compute.manager [req-20f1f72a-99c6-4bec-9a01-15118864c052 req-5f05914e-7cda-49fc-9685-b38d757a5097 service nova] [instance: 61f54696-1ce2-4515-ab9c-72347dbd9de0] Refreshing instance network info cache due to event network-changed-1a7a45f5-84e1-4b69-add9-e5b1fe8fd92f. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1059.713851] env[62627]: DEBUG oslo_concurrency.lockutils [req-20f1f72a-99c6-4bec-9a01-15118864c052 req-5f05914e-7cda-49fc-9685-b38d757a5097 service nova] Acquiring lock "refresh_cache-61f54696-1ce2-4515-ab9c-72347dbd9de0" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1059.776474] env[62627]: DEBUG oslo_vmware.api [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': task-2194835, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.534574} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.777937] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore1] e5a1fa80-6579-4334-9705-33a95dbc3797/e5a1fa80-6579-4334-9705-33a95dbc3797.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1059.778731] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: e5a1fa80-6579-4334-9705-33a95dbc3797] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1059.779897] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2da2a615-3db4-44d0-871e-b250065347ba {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.784344] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c193a024-8d19-4237-9ae4-a8c319edf8b0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.797770] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dd640f0-d782-4dd7-a3b6-25339ea02a06 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.801732] env[62627]: DEBUG oslo_vmware.api [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Waiting for the task: (returnval){ [ 1059.801732] env[62627]: 
value = "task-2194837" [ 1059.801732] env[62627]: _type = "Task" [ 1059.801732] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.803134] env[62627]: DEBUG nova.network.neutron [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 61f54696-1ce2-4515-ab9c-72347dbd9de0] Updating instance_info_cache with network_info: [{"id": "1a7a45f5-84e1-4b69-add9-e5b1fe8fd92f", "address": "fa:16:3e:82:ee:46", "network": {"id": "d8124016-8788-4799-b1a6-b95e841f2a03", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-914036947-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d6324a68b7914b08847262f7773460cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c29724c-5452-441a-8060-5bf89d1f5847", "external-id": "nsx-vlan-transportzone-683", "segmentation_id": 683, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1a7a45f5-84", "ovs_interfaceid": "1a7a45f5-84e1-4b69-add9-e5b1fe8fd92f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1059.843995] env[62627]: DEBUG oslo_vmware.api [None req-cc0630d4-bebb-4218-9fa6-67d8991c4140 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194836, 'name': Destroy_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.846490] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88cfc55f-c969-456f-9203-55fac9a81494 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.853571] env[62627]: DEBUG oslo_vmware.api [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': task-2194837, 'name': ExtendVirtualDisk_Task} progress is 50%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.859632] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3aadf8bc-9420-419f-a787-99d9071b7483 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.875703] env[62627]: DEBUG nova.compute.provider_tree [None req-8c1f4bcb-f9da-4eaf-a217-0537023ff0c9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Updating inventory in ProviderTree for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1059.993099] env[62627]: DEBUG oslo_concurrency.lockutils [req-c4b5c18e-e7b7-4630-932f-a87ca373816e req-1aef0462-fd6e-4dc9-978c-c9e22eb92067 service nova] Releasing lock "refresh_cache-513c755a-424a-4e47-887e-7b315ad2e583" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1060.070612] env[62627]: DEBUG nova.compute.manager [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 5edf4b5b-93b1-4efc-80ec-f3a7009d5449] Start spawning the instance on the hypervisor. {{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1060.096393] env[62627]: DEBUG nova.virt.hardware [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1060.096668] env[62627]: DEBUG nova.virt.hardware [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1060.096860] env[62627]: DEBUG nova.virt.hardware [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 
1060.097131] env[62627]: DEBUG nova.virt.hardware [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1060.097290] env[62627]: DEBUG nova.virt.hardware [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1060.097439] env[62627]: DEBUG nova.virt.hardware [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1060.097759] env[62627]: DEBUG nova.virt.hardware [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1060.097940] env[62627]: DEBUG nova.virt.hardware [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1060.098144] env[62627]: DEBUG nova.virt.hardware [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1060.098352] env[62627]: DEBUG nova.virt.hardware [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1060.098529] env[62627]: DEBUG nova.virt.hardware [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1060.099440] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66f563da-8530-4301-9c1e-02731d175621 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.108715] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-803f971b-f0cf-489b-8fd0-d47808871cc8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.300783] env[62627]: DEBUG oslo_vmware.api [None req-cc0630d4-bebb-4218-9fa6-67d8991c4140 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194836, 'name': 
Destroy_Task, 'duration_secs': 0.988081} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.301101] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-cc0630d4-bebb-4218-9fa6-67d8991c4140 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e263159d-e9ff-4a19-a29a-cb06eaf2d679] Destroyed the VM [ 1060.301331] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-cc0630d4-bebb-4218-9fa6-67d8991c4140 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e263159d-e9ff-4a19-a29a-cb06eaf2d679] Deleting Snapshot of the VM instance {{(pid=62627) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1060.301585] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-8c9df055-195d-474e-9908-108e884dd463 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.308816] env[62627]: DEBUG oslo_vmware.api [None req-cc0630d4-bebb-4218-9fa6-67d8991c4140 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the task: (returnval){ [ 1060.308816] env[62627]: value = "task-2194838" [ 1060.308816] env[62627]: _type = "Task" [ 1060.308816] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.309668] env[62627]: DEBUG oslo_concurrency.lockutils [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Releasing lock "refresh_cache-61f54696-1ce2-4515-ab9c-72347dbd9de0" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1060.309962] env[62627]: DEBUG nova.compute.manager [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 61f54696-1ce2-4515-ab9c-72347dbd9de0] Instance network_info: |[{"id": "1a7a45f5-84e1-4b69-add9-e5b1fe8fd92f", "address": "fa:16:3e:82:ee:46", "network": {"id": "d8124016-8788-4799-b1a6-b95e841f2a03", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-914036947-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d6324a68b7914b08847262f7773460cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c29724c-5452-441a-8060-5bf89d1f5847", "external-id": "nsx-vlan-transportzone-683", "segmentation_id": 683, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1a7a45f5-84", "ovs_interfaceid": "1a7a45f5-84e1-4b69-add9-e5b1fe8fd92f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1060.317313] env[62627]: DEBUG oslo_concurrency.lockutils [req-20f1f72a-99c6-4bec-9a01-15118864c052 
req-5f05914e-7cda-49fc-9685-b38d757a5097 service nova] Acquired lock "refresh_cache-61f54696-1ce2-4515-ab9c-72347dbd9de0" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1060.317540] env[62627]: DEBUG nova.network.neutron [req-20f1f72a-99c6-4bec-9a01-15118864c052 req-5f05914e-7cda-49fc-9685-b38d757a5097 service nova] [instance: 61f54696-1ce2-4515-ab9c-72347dbd9de0] Refreshing network info cache for port 1a7a45f5-84e1-4b69-add9-e5b1fe8fd92f {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1060.318736] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 61f54696-1ce2-4515-ab9c-72347dbd9de0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:82:ee:46', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3c29724c-5452-441a-8060-5bf89d1f5847', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1a7a45f5-84e1-4b69-add9-e5b1fe8fd92f', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1060.326079] env[62627]: DEBUG oslo.service.loopingcall [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1060.326351] env[62627]: DEBUG oslo_vmware.api [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': task-2194837, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078856} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.329540] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 61f54696-1ce2-4515-ab9c-72347dbd9de0] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1060.330233] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: e5a1fa80-6579-4334-9705-33a95dbc3797] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1060.330805] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2d8d5a10-13d0-4a12-ae0f-bf35b56cd254 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.345936] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-937bb267-3c12-4826-a924-1a8c6d0b64e4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.352134] env[62627]: DEBUG oslo_vmware.api [None req-cc0630d4-bebb-4218-9fa6-67d8991c4140 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194838, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.375172] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: e5a1fa80-6579-4334-9705-33a95dbc3797] Reconfiguring VM instance instance-0000004c to attach disk [datastore1] e5a1fa80-6579-4334-9705-33a95dbc3797/e5a1fa80-6579-4334-9705-33a95dbc3797.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1060.377123] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5b6ab82b-3f89-4695-bc4a-3f4f5c73e5e9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.391368] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1060.391368] env[62627]: value = "task-2194839" [ 1060.391368] env[62627]: _type = "Task" [ 1060.391368] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.401823] env[62627]: DEBUG oslo_vmware.api [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Waiting for the task: (returnval){ [ 1060.401823] env[62627]: value = "task-2194840" [ 1060.401823] env[62627]: _type = "Task" [ 1060.401823] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.408113] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194839, 'name': CreateVM_Task} progress is 15%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.413378] env[62627]: DEBUG oslo_vmware.api [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': task-2194840, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.427666] env[62627]: DEBUG nova.scheduler.client.report [None req-8c1f4bcb-f9da-4eaf-a217-0537023ff0c9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Updated inventory for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with generation 97 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1060.427801] env[62627]: DEBUG nova.compute.provider_tree [None req-8c1f4bcb-f9da-4eaf-a217-0537023ff0c9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Updating resource provider c17e7f32-68d8-4e2f-8979-15d701de02f7 generation from 97 to 98 during operation: update_inventory {{(pid=62627) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1060.427934] env[62627]: DEBUG nova.compute.provider_tree [None req-8c1f4bcb-f9da-4eaf-a217-0537023ff0c9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Updating inventory in ProviderTree for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1060.725232] env[62627]: DEBUG nova.network.neutron [req-20f1f72a-99c6-4bec-9a01-15118864c052 req-5f05914e-7cda-49fc-9685-b38d757a5097 service nova] [instance: 61f54696-1ce2-4515-ab9c-72347dbd9de0] Updated VIF entry in instance network info cache for port 1a7a45f5-84e1-4b69-add9-e5b1fe8fd92f. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1060.725593] env[62627]: DEBUG nova.network.neutron [req-20f1f72a-99c6-4bec-9a01-15118864c052 req-5f05914e-7cda-49fc-9685-b38d757a5097 service nova] [instance: 61f54696-1ce2-4515-ab9c-72347dbd9de0] Updating instance_info_cache with network_info: [{"id": "1a7a45f5-84e1-4b69-add9-e5b1fe8fd92f", "address": "fa:16:3e:82:ee:46", "network": {"id": "d8124016-8788-4799-b1a6-b95e841f2a03", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-914036947-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d6324a68b7914b08847262f7773460cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c29724c-5452-441a-8060-5bf89d1f5847", "external-id": "nsx-vlan-transportzone-683", "segmentation_id": 683, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1a7a45f5-84", "ovs_interfaceid": "1a7a45f5-84e1-4b69-add9-e5b1fe8fd92f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1060.794927] env[62627]: DEBUG nova.network.neutron [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 5edf4b5b-93b1-4efc-80ec-f3a7009d5449] Successfully updated port: 7a1d4f75-d881-4698-995b-8545c20494e7 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1060.820092] env[62627]: DEBUG oslo_vmware.api [None req-cc0630d4-bebb-4218-9fa6-67d8991c4140 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194838, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.905721] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194839, 'name': CreateVM_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.913567] env[62627]: DEBUG oslo_vmware.api [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': task-2194840, 'name': ReconfigVM_Task, 'duration_secs': 0.303642} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.913840] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: e5a1fa80-6579-4334-9705-33a95dbc3797] Reconfigured VM instance instance-0000004c to attach disk [datastore1] e5a1fa80-6579-4334-9705-33a95dbc3797/e5a1fa80-6579-4334-9705-33a95dbc3797.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1060.914426] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4dfe1559-5809-4095-8065-3a22e6d6f121 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.920653] env[62627]: DEBUG oslo_vmware.api [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Waiting for the task: (returnval){ [ 1060.920653] env[62627]: value = "task-2194841" [ 1060.920653] env[62627]: _type = "Task" [ 1060.920653] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.928081] env[62627]: DEBUG oslo_vmware.api [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': task-2194841, 'name': Rename_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.932883] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8c1f4bcb-f9da-4eaf-a217-0537023ff0c9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.883s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1060.934968] env[62627]: DEBUG oslo_concurrency.lockutils [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.964s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1060.936437] env[62627]: INFO nova.compute.claims [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1060.953648] env[62627]: INFO nova.scheduler.client.report [None req-8c1f4bcb-f9da-4eaf-a217-0537023ff0c9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Deleted allocations for instance 16ac82e3-b9e3-4c34-bb8c-2989cc33246f [ 1061.228107] env[62627]: DEBUG oslo_concurrency.lockutils [req-20f1f72a-99c6-4bec-9a01-15118864c052 req-5f05914e-7cda-49fc-9685-b38d757a5097 service nova] Releasing lock "refresh_cache-61f54696-1ce2-4515-ab9c-72347dbd9de0" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1061.298118] env[62627]: DEBUG 
oslo_concurrency.lockutils [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Acquiring lock "refresh_cache-5edf4b5b-93b1-4efc-80ec-f3a7009d5449" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1061.298184] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Acquired lock "refresh_cache-5edf4b5b-93b1-4efc-80ec-f3a7009d5449" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1061.298389] env[62627]: DEBUG nova.network.neutron [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 5edf4b5b-93b1-4efc-80ec-f3a7009d5449] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1061.325347] env[62627]: DEBUG oslo_vmware.api [None req-cc0630d4-bebb-4218-9fa6-67d8991c4140 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194838, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.406725] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194839, 'name': CreateVM_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.429447] env[62627]: DEBUG oslo_vmware.api [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': task-2194841, 'name': Rename_Task, 'duration_secs': 0.205523} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.429716] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: e5a1fa80-6579-4334-9705-33a95dbc3797] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1061.429957] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d12c5dea-d4c3-4de5-8f64-be7318f199c1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.436213] env[62627]: DEBUG oslo_vmware.api [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Waiting for the task: (returnval){ [ 1061.436213] env[62627]: value = "task-2194842" [ 1061.436213] env[62627]: _type = "Task" [ 1061.436213] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.446991] env[62627]: DEBUG oslo_vmware.api [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': task-2194842, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.462473] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8c1f4bcb-f9da-4eaf-a217-0537023ff0c9 tempest-TenantUsagesTestJSON-1395357459 tempest-TenantUsagesTestJSON-1395357459-project-member] Lock "16ac82e3-b9e3-4c34-bb8c-2989cc33246f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.292s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1061.740571] env[62627]: DEBUG nova.compute.manager [req-270dd648-4f6e-49d5-8534-6fc7c691287d req-fb6777c4-a2d8-4e79-a241-e9bd3474bd17 service nova] [instance: 5edf4b5b-93b1-4efc-80ec-f3a7009d5449] Received event network-vif-plugged-7a1d4f75-d881-4698-995b-8545c20494e7 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1061.740786] env[62627]: DEBUG oslo_concurrency.lockutils [req-270dd648-4f6e-49d5-8534-6fc7c691287d req-fb6777c4-a2d8-4e79-a241-e9bd3474bd17 service nova] Acquiring lock "5edf4b5b-93b1-4efc-80ec-f3a7009d5449-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1061.741280] env[62627]: DEBUG oslo_concurrency.lockutils [req-270dd648-4f6e-49d5-8534-6fc7c691287d req-fb6777c4-a2d8-4e79-a241-e9bd3474bd17 service nova] Lock "5edf4b5b-93b1-4efc-80ec-f3a7009d5449-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1061.741504] env[62627]: DEBUG oslo_concurrency.lockutils [req-270dd648-4f6e-49d5-8534-6fc7c691287d req-fb6777c4-a2d8-4e79-a241-e9bd3474bd17 service nova] Lock "5edf4b5b-93b1-4efc-80ec-f3a7009d5449-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1061.741683] env[62627]: DEBUG nova.compute.manager [req-270dd648-4f6e-49d5-8534-6fc7c691287d req-fb6777c4-a2d8-4e79-a241-e9bd3474bd17 service nova] [instance: 5edf4b5b-93b1-4efc-80ec-f3a7009d5449] No waiting events found dispatching network-vif-plugged-7a1d4f75-d881-4698-995b-8545c20494e7 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1061.741850] env[62627]: WARNING nova.compute.manager [req-270dd648-4f6e-49d5-8534-6fc7c691287d req-fb6777c4-a2d8-4e79-a241-e9bd3474bd17 service nova] [instance: 5edf4b5b-93b1-4efc-80ec-f3a7009d5449] Received unexpected event network-vif-plugged-7a1d4f75-d881-4698-995b-8545c20494e7 for instance with vm_state building and task_state spawning. [ 1061.742025] env[62627]: DEBUG nova.compute.manager [req-270dd648-4f6e-49d5-8534-6fc7c691287d req-fb6777c4-a2d8-4e79-a241-e9bd3474bd17 service nova] [instance: 5edf4b5b-93b1-4efc-80ec-f3a7009d5449] Received event network-changed-7a1d4f75-d881-4698-995b-8545c20494e7 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1061.742182] env[62627]: DEBUG nova.compute.manager [req-270dd648-4f6e-49d5-8534-6fc7c691287d req-fb6777c4-a2d8-4e79-a241-e9bd3474bd17 service nova] [instance: 5edf4b5b-93b1-4efc-80ec-f3a7009d5449] Refreshing instance network info cache due to event network-changed-7a1d4f75-d881-4698-995b-8545c20494e7. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1061.742507] env[62627]: DEBUG oslo_concurrency.lockutils [req-270dd648-4f6e-49d5-8534-6fc7c691287d req-fb6777c4-a2d8-4e79-a241-e9bd3474bd17 service nova] Acquiring lock "refresh_cache-5edf4b5b-93b1-4efc-80ec-f3a7009d5449" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1061.821548] env[62627]: DEBUG oslo_vmware.api [None req-cc0630d4-bebb-4218-9fa6-67d8991c4140 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194838, 'name': RemoveSnapshot_Task, 'duration_secs': 1.131137} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.821814] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-cc0630d4-bebb-4218-9fa6-67d8991c4140 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e263159d-e9ff-4a19-a29a-cb06eaf2d679] Deleted Snapshot of the VM instance {{(pid=62627) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1061.832264] env[62627]: DEBUG nova.network.neutron [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 5edf4b5b-93b1-4efc-80ec-f3a7009d5449] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1061.907598] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194839, 'name': CreateVM_Task, 'duration_secs': 1.412939} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.907767] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 61f54696-1ce2-4515-ab9c-72347dbd9de0] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1061.908446] env[62627]: DEBUG oslo_concurrency.lockutils [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1061.908629] env[62627]: DEBUG oslo_concurrency.lockutils [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1061.908951] env[62627]: DEBUG oslo_concurrency.lockutils [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1061.909233] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6be95579-d0a3-41e3-952b-4f87a9812c17 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.915055] 
env[62627]: DEBUG oslo_vmware.api [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Waiting for the task: (returnval){ [ 1061.915055] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5224540b-b131-d94a-c7c9-a9d4b78b0e0c" [ 1061.915055] env[62627]: _type = "Task" [ 1061.915055] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.923686] env[62627]: DEBUG oslo_vmware.api [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5224540b-b131-d94a-c7c9-a9d4b78b0e0c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.949839] env[62627]: DEBUG oslo_vmware.api [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': task-2194842, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.989102] env[62627]: DEBUG nova.network.neutron [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 5edf4b5b-93b1-4efc-80ec-f3a7009d5449] Updating instance_info_cache with network_info: [{"id": "7a1d4f75-d881-4698-995b-8545c20494e7", "address": "fa:16:3e:d0:71:f7", "network": {"id": "d8124016-8788-4799-b1a6-b95e841f2a03", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-914036947-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d6324a68b7914b08847262f7773460cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c29724c-5452-441a-8060-5bf89d1f5847", "external-id": "nsx-vlan-transportzone-683", "segmentation_id": 683, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a1d4f75-d8", "ovs_interfaceid": "7a1d4f75-d881-4698-995b-8545c20494e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1062.185201] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b04f1f30-a271-4ec7-90e5-a694c3aad677 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.193477] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cca99e7a-a5cb-4835-96bc-2917ca1d8bab {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.225427] env[62627]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76b61f4c-1657-4741-9b7e-48782587b92b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.233507] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4857520-f401-4517-9113-341a5e0acaed {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.248102] env[62627]: DEBUG nova.compute.provider_tree [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Updating inventory in ProviderTree for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1062.326756] env[62627]: WARNING nova.compute.manager [None req-cc0630d4-bebb-4218-9fa6-67d8991c4140 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e263159d-e9ff-4a19-a29a-cb06eaf2d679] Image not found during snapshot: nova.exception.ImageNotFound: Image 710b1144-d228-420e-89f3-944450e31d8e could not be found. [ 1062.424831] env[62627]: DEBUG oslo_vmware.api [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5224540b-b131-d94a-c7c9-a9d4b78b0e0c, 'name': SearchDatastore_Task, 'duration_secs': 0.013985} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.425140] env[62627]: DEBUG oslo_concurrency.lockutils [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1062.425373] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 61f54696-1ce2-4515-ab9c-72347dbd9de0] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1062.425600] env[62627]: DEBUG oslo_concurrency.lockutils [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1062.425745] env[62627]: DEBUG oslo_concurrency.lockutils [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1062.425955] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1062.426230] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a0e7dcc5-3b22-4695-9082-f1eb804093ef {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.434324] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1062.434495] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1062.435212] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3fe151f0-c50a-479b-abf6-a7bb5e68855c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.443580] env[62627]: DEBUG oslo_vmware.api [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Waiting for the task: (returnval){ [ 1062.443580] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]528558e6-c6f6-97bc-974d-746b5abe9699" [ 1062.443580] env[62627]: _type = "Task" [ 1062.443580] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.446414] env[62627]: DEBUG oslo_vmware.api [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': task-2194842, 'name': PowerOnVM_Task, 'duration_secs': 0.661019} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.449159] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: e5a1fa80-6579-4334-9705-33a95dbc3797] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1062.449371] env[62627]: INFO nova.compute.manager [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: e5a1fa80-6579-4334-9705-33a95dbc3797] Took 7.11 seconds to spawn the instance on the hypervisor. [ 1062.449546] env[62627]: DEBUG nova.compute.manager [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: e5a1fa80-6579-4334-9705-33a95dbc3797] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1062.450256] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be8650d5-3af0-4f59-9414-36762cbb01db {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.457086] env[62627]: DEBUG oslo_vmware.api [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]528558e6-c6f6-97bc-974d-746b5abe9699, 'name': SearchDatastore_Task, 'duration_secs': 0.008836} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.461570] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7afda656-0615-462b-9a37-3fa62db8dd5a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.466458] env[62627]: DEBUG oslo_vmware.api [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Waiting for the task: (returnval){ [ 1062.466458] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]522d93ab-07fc-f453-d61a-2ecf00da1ce5" [ 1062.466458] env[62627]: _type = "Task" [ 1062.466458] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.474494] env[62627]: DEBUG oslo_vmware.api [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]522d93ab-07fc-f453-d61a-2ecf00da1ce5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.492144] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Releasing lock "refresh_cache-5edf4b5b-93b1-4efc-80ec-f3a7009d5449" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1062.492487] env[62627]: DEBUG nova.compute.manager [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 5edf4b5b-93b1-4efc-80ec-f3a7009d5449] Instance network_info: |[{"id": "7a1d4f75-d881-4698-995b-8545c20494e7", "address": "fa:16:3e:d0:71:f7", "network": {"id": "d8124016-8788-4799-b1a6-b95e841f2a03", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-914036947-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d6324a68b7914b08847262f7773460cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c29724c-5452-441a-8060-5bf89d1f5847", "external-id": "nsx-vlan-transportzone-683", "segmentation_id": 683, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a1d4f75-d8", "ovs_interfaceid": "7a1d4f75-d881-4698-995b-8545c20494e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1062.492755] env[62627]: DEBUG oslo_concurrency.lockutils [req-270dd648-4f6e-49d5-8534-6fc7c691287d req-fb6777c4-a2d8-4e79-a241-e9bd3474bd17 service nova] Acquired lock "refresh_cache-5edf4b5b-93b1-4efc-80ec-f3a7009d5449" {{(pid=62627) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1062.492936] env[62627]: DEBUG nova.network.neutron [req-270dd648-4f6e-49d5-8534-6fc7c691287d req-fb6777c4-a2d8-4e79-a241-e9bd3474bd17 service nova] [instance: 5edf4b5b-93b1-4efc-80ec-f3a7009d5449] Refreshing network info cache for port 7a1d4f75-d881-4698-995b-8545c20494e7 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1062.494141] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 5edf4b5b-93b1-4efc-80ec-f3a7009d5449] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d0:71:f7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3c29724c-5452-441a-8060-5bf89d1f5847', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7a1d4f75-d881-4698-995b-8545c20494e7', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1062.502047] env[62627]: DEBUG oslo.service.loopingcall [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1062.502995] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5edf4b5b-93b1-4efc-80ec-f3a7009d5449] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1062.503659] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fb6b84d9-914f-49e2-b15a-743ec3c10bc8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.523155] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1062.523155] env[62627]: value = "task-2194843" [ 1062.523155] env[62627]: _type = "Task" [ 1062.523155] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.530669] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194843, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.781269] env[62627]: DEBUG nova.scheduler.client.report [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Updated inventory for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with generation 98 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1062.781269] env[62627]: DEBUG nova.compute.provider_tree [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Updating resource provider c17e7f32-68d8-4e2f-8979-15d701de02f7 generation from 98 to 99 during operation: update_inventory {{(pid=62627) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1062.781521] env[62627]: DEBUG nova.compute.provider_tree [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Updating inventory in ProviderTree for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1062.974297] env[62627]: INFO nova.compute.manager [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: e5a1fa80-6579-4334-9705-33a95dbc3797] Took 16.39 seconds to build instance. [ 1062.979577] env[62627]: DEBUG oslo_vmware.api [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]522d93ab-07fc-f453-d61a-2ecf00da1ce5, 'name': SearchDatastore_Task, 'duration_secs': 0.008725} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.979869] env[62627]: DEBUG oslo_concurrency.lockutils [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1062.980144] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 61f54696-1ce2-4515-ab9c-72347dbd9de0/61f54696-1ce2-4515-ab9c-72347dbd9de0.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1062.980399] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f9e3da26-a2e7-417b-bf35-99bfeb5b96c7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.987690] env[62627]: DEBUG oslo_vmware.api [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Waiting for the task: (returnval){ [ 1062.987690] env[62627]: value = "task-2194844" [ 1062.987690] env[62627]: _type = "Task" [ 1062.987690] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.997976] env[62627]: DEBUG oslo_vmware.api [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': task-2194844, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.033182] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194843, 'name': CreateVM_Task, 'duration_secs': 0.337185} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.033335] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5edf4b5b-93b1-4efc-80ec-f3a7009d5449] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1063.034015] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1063.034180] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1063.034487] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1063.034864] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4a7b1f89-cb37-4e7a-b767-10c81242334e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.038842] env[62627]: DEBUG oslo_vmware.api [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Waiting for the task: (returnval){ [ 1063.038842] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52174ee8-a16f-a06a-292d-e330a49860fb" [ 1063.038842] env[62627]: _type = "Task" [ 1063.038842] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.048760] env[62627]: DEBUG oslo_vmware.api [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52174ee8-a16f-a06a-292d-e330a49860fb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.255526] env[62627]: DEBUG nova.network.neutron [req-270dd648-4f6e-49d5-8534-6fc7c691287d req-fb6777c4-a2d8-4e79-a241-e9bd3474bd17 service nova] [instance: 5edf4b5b-93b1-4efc-80ec-f3a7009d5449] Updated VIF entry in instance network info cache for port 7a1d4f75-d881-4698-995b-8545c20494e7. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1063.255936] env[62627]: DEBUG nova.network.neutron [req-270dd648-4f6e-49d5-8534-6fc7c691287d req-fb6777c4-a2d8-4e79-a241-e9bd3474bd17 service nova] [instance: 5edf4b5b-93b1-4efc-80ec-f3a7009d5449] Updating instance_info_cache with network_info: [{"id": "7a1d4f75-d881-4698-995b-8545c20494e7", "address": "fa:16:3e:d0:71:f7", "network": {"id": "d8124016-8788-4799-b1a6-b95e841f2a03", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-914036947-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d6324a68b7914b08847262f7773460cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c29724c-5452-441a-8060-5bf89d1f5847", "external-id": "nsx-vlan-transportzone-683", "segmentation_id": 683, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a1d4f75-d8", "ovs_interfaceid": "7a1d4f75-d881-4698-995b-8545c20494e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1063.287093] env[62627]: DEBUG oslo_concurrency.lockutils [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.352s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1063.287697] env[62627]: DEBUG nova.compute.manager [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7] Start building networks asynchronously for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1063.290643] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.705s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1063.292690] env[62627]: INFO nova.compute.claims [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1063.311756] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d524044e-d080-4f77-bf4d-2445d726bf84 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Acquiring lock "e263159d-e9ff-4a19-a29a-cb06eaf2d679" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1063.311756] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d524044e-d080-4f77-bf4d-2445d726bf84 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Lock "e263159d-e9ff-4a19-a29a-cb06eaf2d679" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.002s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1063.312097] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d524044e-d080-4f77-bf4d-2445d726bf84 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Acquiring lock "e263159d-e9ff-4a19-a29a-cb06eaf2d679-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1063.312097] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d524044e-d080-4f77-bf4d-2445d726bf84 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Lock "e263159d-e9ff-4a19-a29a-cb06eaf2d679-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1063.312267] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d524044e-d080-4f77-bf4d-2445d726bf84 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Lock "e263159d-e9ff-4a19-a29a-cb06eaf2d679-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1063.314747] env[62627]: INFO nova.compute.manager [None req-d524044e-d080-4f77-bf4d-2445d726bf84 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e263159d-e9ff-4a19-a29a-cb06eaf2d679] Terminating instance [ 1063.481226] env[62627]: DEBUG oslo_concurrency.lockutils [None req-076d20f7-9ec1-40dc-850d-80266e1e9a88 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Lock 
"e5a1fa80-6579-4334-9705-33a95dbc3797" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.912s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1063.498774] env[62627]: DEBUG oslo_vmware.api [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': task-2194844, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.548531] env[62627]: DEBUG oslo_vmware.api [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52174ee8-a16f-a06a-292d-e330a49860fb, 'name': SearchDatastore_Task, 'duration_secs': 0.017111} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.548860] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1063.549157] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 5edf4b5b-93b1-4efc-80ec-f3a7009d5449] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1063.549402] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1063.549546] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1063.549720] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1063.549981] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-83fb0071-0ffe-4517-9dd0-bc14e5d62fa6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.559043] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1063.559349] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1063.560446] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ce64242f-d3a8-4e8e-b084-8befd0f47c06 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.567581] env[62627]: DEBUG oslo_vmware.api [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Waiting for the task: (returnval){ [ 1063.567581] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]529bd0e8-10c2-116d-16f1-c59bd220430f" [ 1063.567581] env[62627]: _type = "Task" [ 1063.567581] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.578330] env[62627]: DEBUG oslo_vmware.api [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]529bd0e8-10c2-116d-16f1-c59bd220430f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.759392] env[62627]: DEBUG oslo_concurrency.lockutils [req-270dd648-4f6e-49d5-8534-6fc7c691287d req-fb6777c4-a2d8-4e79-a241-e9bd3474bd17 service nova] Releasing lock "refresh_cache-5edf4b5b-93b1-4efc-80ec-f3a7009d5449" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1063.797344] env[62627]: DEBUG nova.compute.utils [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1063.801060] env[62627]: DEBUG nova.compute.manager [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7] Allocating IP information in the background. 
{{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1063.801293] env[62627]: DEBUG nova.network.neutron [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1063.818639] env[62627]: DEBUG nova.compute.manager [None req-d524044e-d080-4f77-bf4d-2445d726bf84 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e263159d-e9ff-4a19-a29a-cb06eaf2d679] Start destroying the instance on the hypervisor. {{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1063.818853] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d524044e-d080-4f77-bf4d-2445d726bf84 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e263159d-e9ff-4a19-a29a-cb06eaf2d679] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1063.819968] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3410db7a-ae3e-4761-a64e-00abfb1dc420 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.828654] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-d524044e-d080-4f77-bf4d-2445d726bf84 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e263159d-e9ff-4a19-a29a-cb06eaf2d679] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1063.828906] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-061a6846-3238-4a5e-9f87-36a562c4f2c0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.835416] env[62627]: DEBUG oslo_vmware.api [None req-d524044e-d080-4f77-bf4d-2445d726bf84 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the task: (returnval){ [ 1063.835416] env[62627]: value = "task-2194845" [ 1063.835416] env[62627]: _type = "Task" [ 1063.835416] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.845646] env[62627]: DEBUG oslo_vmware.api [None req-d524044e-d080-4f77-bf4d-2445d726bf84 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194845, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.847160] env[62627]: DEBUG nova.policy [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1253a87d708448aba8d97fcabe137853', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6c9263b20f714274a55b88a8bdbd5251', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 1064.000291] env[62627]: DEBUG oslo_vmware.api [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': task-2194844, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.547716} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.001104] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 61f54696-1ce2-4515-ab9c-72347dbd9de0/61f54696-1ce2-4515-ab9c-72347dbd9de0.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1064.001104] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 61f54696-1ce2-4515-ab9c-72347dbd9de0] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1064.001509] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e2d92bb3-ef5f-4d5a-9f21-911d50b3695a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.008783] env[62627]: DEBUG oslo_vmware.api [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Waiting for the task: (returnval){ [ 1064.008783] env[62627]: value = "task-2194846" [ 1064.008783] env[62627]: _type = "Task" [ 1064.008783] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.017665] env[62627]: DEBUG oslo_vmware.api [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': task-2194846, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.079995] env[62627]: DEBUG oslo_vmware.api [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]529bd0e8-10c2-116d-16f1-c59bd220430f, 'name': SearchDatastore_Task, 'duration_secs': 0.046899} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.080888] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5bab8ba7-593c-4cdf-b86a-fe4b70510f73 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.086412] env[62627]: DEBUG oslo_vmware.api [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Waiting for the task: (returnval){ [ 1064.086412] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]522edcbf-898c-7713-b719-049f928369b8" [ 1064.086412] env[62627]: _type = "Task" [ 1064.086412] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.097251] env[62627]: DEBUG oslo_vmware.api [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]522edcbf-898c-7713-b719-049f928369b8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.194300] env[62627]: DEBUG nova.network.neutron [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7] Successfully created port: 8170eace-a3a9-4b61-8d61-10d08aa65847 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1064.305029] env[62627]: DEBUG nova.compute.manager [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1064.353055] env[62627]: DEBUG oslo_vmware.api [None req-d524044e-d080-4f77-bf4d-2445d726bf84 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194845, 'name': PowerOffVM_Task, 'duration_secs': 0.248198} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.353329] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-d524044e-d080-4f77-bf4d-2445d726bf84 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e263159d-e9ff-4a19-a29a-cb06eaf2d679] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1064.354688] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d524044e-d080-4f77-bf4d-2445d726bf84 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e263159d-e9ff-4a19-a29a-cb06eaf2d679] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1064.354688] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-73e4914e-94a7-44a7-aba9-302be4b00e8d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.440924] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d524044e-d080-4f77-bf4d-2445d726bf84 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e263159d-e9ff-4a19-a29a-cb06eaf2d679] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1064.440924] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d524044e-d080-4f77-bf4d-2445d726bf84 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e263159d-e9ff-4a19-a29a-cb06eaf2d679] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1064.440924] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-d524044e-d080-4f77-bf4d-2445d726bf84 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Deleting the datastore file [datastore2] e263159d-e9ff-4a19-a29a-cb06eaf2d679 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1064.441107] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a77bc080-9a08-481c-86cd-0189f20c2409 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.448192] env[62627]: DEBUG oslo_vmware.api [None req-d524044e-d080-4f77-bf4d-2445d726bf84 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for the task: (returnval){ [ 1064.448192] env[62627]: value = "task-2194848" [ 1064.448192] env[62627]: _type = "Task" [ 1064.448192] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.459073] env[62627]: DEBUG oslo_vmware.api [None req-d524044e-d080-4f77-bf4d-2445d726bf84 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194848, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.524065] env[62627]: DEBUG oslo_vmware.api [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': task-2194846, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.143105} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.524644] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 61f54696-1ce2-4515-ab9c-72347dbd9de0] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1064.525454] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-271688bd-7a6e-4e7a-911d-c27be5c25020 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.549891] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 61f54696-1ce2-4515-ab9c-72347dbd9de0] Reconfiguring VM instance instance-0000004d to attach disk [datastore2] 61f54696-1ce2-4515-ab9c-72347dbd9de0/61f54696-1ce2-4515-ab9c-72347dbd9de0.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1064.553016] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1cd050de-3f5d-4515-ba73-9f59b877b70a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.574330] env[62627]: DEBUG oslo_vmware.api [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Waiting for the task: (returnval){ [ 1064.574330] env[62627]: value = "task-2194849" [ 1064.574330] env[62627]: _type = "Task" [ 1064.574330] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.582602] env[62627]: DEBUG oslo_vmware.api [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': task-2194849, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.596550] env[62627]: DEBUG oslo_vmware.api [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]522edcbf-898c-7713-b719-049f928369b8, 'name': SearchDatastore_Task, 'duration_secs': 0.022997} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.599611] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1064.599903] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 5edf4b5b-93b1-4efc-80ec-f3a7009d5449/5edf4b5b-93b1-4efc-80ec-f3a7009d5449.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1064.600378] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-453f5e85-ca70-4a56-b33a-c45277574e3d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.607464] env[62627]: DEBUG oslo_vmware.api [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Waiting for the task: (returnval){ [ 1064.607464] env[62627]: value = "task-2194850" [ 1064.607464] env[62627]: _type = "Task" [ 1064.607464] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.619521] env[62627]: DEBUG oslo_vmware.api [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': task-2194850, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.652966] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4a1ca43-ae9a-4049-b4f3-a7dd4f7f7735 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.660384] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1cc01de-6b5a-4805-9fba-7234548d85ca {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.692114] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b18bd7fa-a946-4112-8c8a-50af61819aeb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.700051] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a80fdaf-4d89-4406-ad98-1213cf20c4e6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.715319] env[62627]: DEBUG nova.compute.provider_tree [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1064.959636] env[62627]: DEBUG oslo_vmware.api [None req-d524044e-d080-4f77-bf4d-2445d726bf84 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Task: {'id': task-2194848, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.378337} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.960145] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-d524044e-d080-4f77-bf4d-2445d726bf84 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1064.960242] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d524044e-d080-4f77-bf4d-2445d726bf84 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e263159d-e9ff-4a19-a29a-cb06eaf2d679] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1064.960444] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d524044e-d080-4f77-bf4d-2445d726bf84 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e263159d-e9ff-4a19-a29a-cb06eaf2d679] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1064.960642] env[62627]: INFO nova.compute.manager [None req-d524044e-d080-4f77-bf4d-2445d726bf84 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] [instance: e263159d-e9ff-4a19-a29a-cb06eaf2d679] Took 1.14 seconds to destroy the instance on the hypervisor. 
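The entries above all follow the same two mechanisms. First, every vCenter operation (CreateVM_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, PowerOffVM_Task, DeleteDatastoreFile_Task, ...) is submitted and then polled until _poll_task reports it completed; each "progress is N%" line is one poll iteration. A minimal sketch of that polling loop, using the same oslo.service loopingcall primitive the log references; get_task_state and the 0.5 s interval are illustrative assumptions, not values taken from this log:

```python
from oslo_service import loopingcall


def wait_for_task(get_task_state, task_id, interval=0.5):
    """Poll a task until it finishes, mirroring the wait_for_task /
    _poll_task pattern visible in the entries above.

    get_task_state(task_id) is a hypothetical callable returning a
    (state, info) tuple; it stands in for the SOAP round trip that
    oslo.vmware performs against vCenter on every poll.
    """

    def _poll():
        state, info = get_task_state(task_id)
        if state == "running":
            # Corresponds to the "... progress is N%" DEBUG lines.
            print("Task %s progress is %s%%" % (task_id, info))
        elif state == "success":
            # Stop the loop; wait() below returns `info`.
            raise loopingcall.LoopingCallDone(info)
        else:
            raise RuntimeError("Task %s failed: %s" % (task_id, info))

    timer = loopingcall.FixedIntervalLoopingCall(_poll)
    return timer.start(interval=interval).wait()
```

Second, the "Acquiring lock ... / acquired ... waited N.NNNs / released ... held N.NNNs" triples are emitted by oslo.concurrency's lock wrapper serializing callers, for example on "compute_resources" during instance_claim. A toy illustration of the same serialization, assuming a placeholder claim body rather than nova's actual implementation:

```python
import time

from oslo_concurrency import lockutils


@lockutils.synchronized("compute_resources")
def instance_claim(instance_uuid):
    # Concurrent callers queue on the lock; time spent queueing and time
    # spent inside show up as the "waited"/"held" figures in the log.
    time.sleep(0.1)  # placeholder for the real resource bookkeeping
    return {"instance": instance_uuid, "claimed": True}
```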
[ 1064.960916] env[62627]: DEBUG oslo.service.loopingcall [None req-d524044e-d080-4f77-bf4d-2445d726bf84 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1064.961159] env[62627]: DEBUG nova.compute.manager [-] [instance: e263159d-e9ff-4a19-a29a-cb06eaf2d679] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1064.961607] env[62627]: DEBUG nova.network.neutron [-] [instance: e263159d-e9ff-4a19-a29a-cb06eaf2d679] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1065.084876] env[62627]: DEBUG oslo_vmware.api [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': task-2194849, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.117964] env[62627]: DEBUG oslo_vmware.api [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': task-2194850, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.220350] env[62627]: DEBUG nova.scheduler.client.report [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1065.314480] env[62627]: DEBUG nova.compute.manager [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7] Start spawning the instance on the hypervisor. 
{{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1065.338542] env[62627]: DEBUG nova.virt.hardware [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1065.338796] env[62627]: DEBUG nova.virt.hardware [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1065.338952] env[62627]: DEBUG nova.virt.hardware [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1065.339155] env[62627]: DEBUG nova.virt.hardware [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1065.339301] env[62627]: DEBUG nova.virt.hardware [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1065.339444] env[62627]: DEBUG nova.virt.hardware [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1065.339689] env[62627]: DEBUG nova.virt.hardware [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1065.339873] env[62627]: DEBUG nova.virt.hardware [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1065.340052] env[62627]: DEBUG nova.virt.hardware [None 
req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1065.340210] env[62627]: DEBUG nova.virt.hardware [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1065.340384] env[62627]: DEBUG nova.virt.hardware [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1065.341259] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3e09270-50fa-45da-9733-ac6d998f7734 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.349314] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e68b95e9-3134-47a8-bacc-c44604e6cc98 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.446724] env[62627]: DEBUG nova.compute.manager [req-88c7efc9-428c-4766-9603-7d0e92cdb0b6 req-14b51a8c-5b48-40ac-97e9-d2bdb1f92b06 service nova] [instance: e263159d-e9ff-4a19-a29a-cb06eaf2d679] Received event network-vif-deleted-08a5a113-46c4-4d22-b644-b503cfb32646 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1065.446958] env[62627]: INFO nova.compute.manager [req-88c7efc9-428c-4766-9603-7d0e92cdb0b6 req-14b51a8c-5b48-40ac-97e9-d2bdb1f92b06 service nova] [instance: e263159d-e9ff-4a19-a29a-cb06eaf2d679] Neutron deleted interface 08a5a113-46c4-4d22-b644-b503cfb32646; detaching it from the instance and deleting it from the info cache [ 1065.447182] env[62627]: DEBUG nova.network.neutron [req-88c7efc9-428c-4766-9603-7d0e92cdb0b6 req-14b51a8c-5b48-40ac-97e9-d2bdb1f92b06 service nova] [instance: e263159d-e9ff-4a19-a29a-cb06eaf2d679] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1065.588878] env[62627]: DEBUG oslo_vmware.api [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': task-2194849, 'name': ReconfigVM_Task, 'duration_secs': 0.529826} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.589761] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 61f54696-1ce2-4515-ab9c-72347dbd9de0] Reconfigured VM instance instance-0000004d to attach disk [datastore2] 61f54696-1ce2-4515-ab9c-72347dbd9de0/61f54696-1ce2-4515-ab9c-72347dbd9de0.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1065.590889] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b4b66f85-ad03-4051-a1a1-308e3b734684 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.599188] env[62627]: DEBUG oslo_vmware.api [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Waiting for the task: (returnval){ [ 1065.599188] env[62627]: value = "task-2194851" [ 1065.599188] env[62627]: _type = "Task" [ 1065.599188] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.608379] env[62627]: DEBUG oslo_vmware.api [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': task-2194851, 'name': Rename_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.618457] env[62627]: DEBUG oslo_vmware.api [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': task-2194850, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.725606] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.435s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1065.726215] env[62627]: DEBUG nova.compute.manager [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Start building networks asynchronously for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1065.728956] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ac005499-9a90-4f39-a221-040b29afc0d4 tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.361s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1065.729246] env[62627]: DEBUG nova.objects.instance [None req-ac005499-9a90-4f39-a221-040b29afc0d4 tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Lazy-loading 'resources' on Instance uuid f8003eee-248c-4005-8465-6e3e96f98f88 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1065.796738] env[62627]: DEBUG nova.network.neutron [-] [instance: e263159d-e9ff-4a19-a29a-cb06eaf2d679] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1065.829449] env[62627]: DEBUG nova.network.neutron [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7] Successfully updated port: 8170eace-a3a9-4b61-8d61-10d08aa65847 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1065.950470] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3dd80dbf-d336-4628-9420-075a8a25e201 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.960921] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26c06c80-48ec-4799-bfd5-50e7e5eb8ae0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.995462] env[62627]: DEBUG nova.compute.manager [req-88c7efc9-428c-4766-9603-7d0e92cdb0b6 req-14b51a8c-5b48-40ac-97e9-d2bdb1f92b06 service nova] [instance: e263159d-e9ff-4a19-a29a-cb06eaf2d679] Detach interface failed, port_id=08a5a113-46c4-4d22-b644-b503cfb32646, reason: Instance e263159d-e9ff-4a19-a29a-cb06eaf2d679 could not be found. {{(pid=62627) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1066.109176] env[62627]: DEBUG oslo_vmware.api [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': task-2194851, 'name': Rename_Task, 'duration_secs': 0.219962} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.109414] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 61f54696-1ce2-4515-ab9c-72347dbd9de0] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1066.109658] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0a87b984-64a0-4dd2-b1b5-764a7bd46187 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.119261] env[62627]: DEBUG oslo_vmware.api [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': task-2194850, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.120288] env[62627]: DEBUG oslo_vmware.api [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Waiting for the task: (returnval){ [ 1066.120288] env[62627]: value = "task-2194852" [ 1066.120288] env[62627]: _type = "Task" [ 1066.120288] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.127217] env[62627]: DEBUG oslo_vmware.api [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': task-2194852, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.231836] env[62627]: DEBUG nova.compute.utils [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1066.235971] env[62627]: DEBUG nova.compute.manager [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Allocating IP information in the background. 
{{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1066.236165] env[62627]: DEBUG nova.network.neutron [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: e71794b9-22eb-4643-8587-f12a198024a4] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1066.286501] env[62627]: DEBUG nova.policy [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ade098da576147a6bff6685cb886ff78', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7a97f57bdf4a46db80460b1d5c9f9b9f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 1066.299880] env[62627]: INFO nova.compute.manager [-] [instance: e263159d-e9ff-4a19-a29a-cb06eaf2d679] Took 1.34 seconds to deallocate network for instance. [ 1066.332679] env[62627]: DEBUG oslo_concurrency.lockutils [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquiring lock "refresh_cache-3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1066.332679] env[62627]: DEBUG oslo_concurrency.lockutils [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquired lock "refresh_cache-3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1066.332679] env[62627]: DEBUG nova.network.neutron [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1066.460904] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e6cbdce-24bf-4bf1-ae6d-4baf77f38f66 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.468551] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92c99304-4cb0-435d-8e42-4b3105a4c187 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.498640] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ac02bcb-7adc-4b26-a6b3-540648e5c5eb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.506378] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c99f2e15-182e-4a83-8b25-c6cce29966f4 {{(pid=62627) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.519483] env[62627]: DEBUG nova.compute.provider_tree [None req-ac005499-9a90-4f39-a221-040b29afc0d4 tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1066.622678] env[62627]: DEBUG oslo_vmware.api [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': task-2194850, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.629969] env[62627]: DEBUG oslo_vmware.api [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': task-2194852, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.631510] env[62627]: DEBUG nova.network.neutron [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Successfully created port: c77ae6ec-d1ea-4f56-b856-910a5835bc0d {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1066.736764] env[62627]: DEBUG nova.compute.manager [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1066.809349] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d524044e-d080-4f77-bf4d-2445d726bf84 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1066.873848] env[62627]: DEBUG nova.network.neutron [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7] Instance cache missing network info. 
{{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1067.023741] env[62627]: DEBUG nova.scheduler.client.report [None req-ac005499-9a90-4f39-a221-040b29afc0d4 tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1067.070041] env[62627]: DEBUG nova.network.neutron [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7] Updating instance_info_cache with network_info: [{"id": "8170eace-a3a9-4b61-8d61-10d08aa65847", "address": "fa:16:3e:14:e3:69", "network": {"id": "7c6d5a35-da58-4a44-83d0-20feb994e5ab", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-709440062-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c9263b20f714274a55b88a8bdbd5251", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "99be9a5e-b3f9-4e6c-83d5-df11f817847d", "external-id": "nsx-vlan-transportzone-566", "segmentation_id": 566, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8170eace-a3", "ovs_interfaceid": "8170eace-a3a9-4b61-8d61-10d08aa65847", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1067.122530] env[62627]: DEBUG oslo_vmware.api [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': task-2194850, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.155785} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.122886] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 5edf4b5b-93b1-4efc-80ec-f3a7009d5449/5edf4b5b-93b1-4efc-80ec-f3a7009d5449.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1067.123121] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 5edf4b5b-93b1-4efc-80ec-f3a7009d5449] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1067.126398] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-970ad66d-f9a6-42c2-bdee-345d0de339cf {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.133324] env[62627]: DEBUG oslo_vmware.api [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': task-2194852, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.134769] env[62627]: DEBUG oslo_vmware.api [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Waiting for the task: (returnval){ [ 1067.134769] env[62627]: value = "task-2194853" [ 1067.134769] env[62627]: _type = "Task" [ 1067.134769] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.142549] env[62627]: DEBUG oslo_vmware.api [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': task-2194853, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.476787] env[62627]: DEBUG nova.compute.manager [req-13e9762e-bc11-4bf1-a683-da4fe9a761df req-f3a3f4fe-1dca-42cb-adc6-bc7dd3dd24ba service nova] [instance: 3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7] Received event network-vif-plugged-8170eace-a3a9-4b61-8d61-10d08aa65847 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1067.477015] env[62627]: DEBUG oslo_concurrency.lockutils [req-13e9762e-bc11-4bf1-a683-da4fe9a761df req-f3a3f4fe-1dca-42cb-adc6-bc7dd3dd24ba service nova] Acquiring lock "3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1067.477277] env[62627]: DEBUG oslo_concurrency.lockutils [req-13e9762e-bc11-4bf1-a683-da4fe9a761df req-f3a3f4fe-1dca-42cb-adc6-bc7dd3dd24ba service nova] Lock "3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1067.477448] env[62627]: DEBUG oslo_concurrency.lockutils [req-13e9762e-bc11-4bf1-a683-da4fe9a761df req-f3a3f4fe-1dca-42cb-adc6-bc7dd3dd24ba service nova] Lock "3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1067.477609] env[62627]: DEBUG nova.compute.manager [req-13e9762e-bc11-4bf1-a683-da4fe9a761df req-f3a3f4fe-1dca-42cb-adc6-bc7dd3dd24ba service nova] [instance: 3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7] No waiting events found dispatching network-vif-plugged-8170eace-a3a9-4b61-8d61-10d08aa65847 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1067.477770] env[62627]: WARNING nova.compute.manager [req-13e9762e-bc11-4bf1-a683-da4fe9a761df req-f3a3f4fe-1dca-42cb-adc6-bc7dd3dd24ba service nova] [instance: 3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7] Received unexpected event network-vif-plugged-8170eace-a3a9-4b61-8d61-10d08aa65847 for instance with vm_state building and task_state spawning. [ 1067.477926] env[62627]: DEBUG nova.compute.manager [req-13e9762e-bc11-4bf1-a683-da4fe9a761df req-f3a3f4fe-1dca-42cb-adc6-bc7dd3dd24ba service nova] [instance: 3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7] Received event network-changed-8170eace-a3a9-4b61-8d61-10d08aa65847 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1067.478339] env[62627]: DEBUG nova.compute.manager [req-13e9762e-bc11-4bf1-a683-da4fe9a761df req-f3a3f4fe-1dca-42cb-adc6-bc7dd3dd24ba service nova] [instance: 3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7] Refreshing instance network info cache due to event network-changed-8170eace-a3a9-4b61-8d61-10d08aa65847. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1067.478552] env[62627]: DEBUG oslo_concurrency.lockutils [req-13e9762e-bc11-4bf1-a683-da4fe9a761df req-f3a3f4fe-1dca-42cb-adc6-bc7dd3dd24ba service nova] Acquiring lock "refresh_cache-3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1067.528740] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ac005499-9a90-4f39-a221-040b29afc0d4 tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.800s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1067.531054] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d524044e-d080-4f77-bf4d-2445d726bf84 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.722s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1067.531502] env[62627]: DEBUG nova.objects.instance [None req-d524044e-d080-4f77-bf4d-2445d726bf84 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Lazy-loading 'resources' on Instance uuid e263159d-e9ff-4a19-a29a-cb06eaf2d679 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1067.553840] env[62627]: INFO nova.scheduler.client.report [None req-ac005499-9a90-4f39-a221-040b29afc0d4 tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Deleted allocations for instance f8003eee-248c-4005-8465-6e3e96f98f88 [ 1067.572626] env[62627]: DEBUG oslo_concurrency.lockutils [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Releasing lock "refresh_cache-3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1067.572950] env[62627]: DEBUG nova.compute.manager [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7] Instance network_info: |[{"id": "8170eace-a3a9-4b61-8d61-10d08aa65847", "address": "fa:16:3e:14:e3:69", "network": {"id": "7c6d5a35-da58-4a44-83d0-20feb994e5ab", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-709440062-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c9263b20f714274a55b88a8bdbd5251", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "99be9a5e-b3f9-4e6c-83d5-df11f817847d", "external-id": "nsx-vlan-transportzone-566", "segmentation_id": 566, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8170eace-a3", "ovs_interfaceid": "8170eace-a3a9-4b61-8d61-10d08aa65847", "qbh_params": 
null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1067.573255] env[62627]: DEBUG oslo_concurrency.lockutils [req-13e9762e-bc11-4bf1-a683-da4fe9a761df req-f3a3f4fe-1dca-42cb-adc6-bc7dd3dd24ba service nova] Acquired lock "refresh_cache-3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1067.573428] env[62627]: DEBUG nova.network.neutron [req-13e9762e-bc11-4bf1-a683-da4fe9a761df req-f3a3f4fe-1dca-42cb-adc6-bc7dd3dd24ba service nova] [instance: 3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7] Refreshing network info cache for port 8170eace-a3a9-4b61-8d61-10d08aa65847 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1067.574566] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:14:e3:69', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '99be9a5e-b3f9-4e6c-83d5-df11f817847d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8170eace-a3a9-4b61-8d61-10d08aa65847', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1067.583508] env[62627]: DEBUG oslo.service.loopingcall [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1067.584304] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1067.584535] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-394bf313-8f90-4d41-9025-a3c96c3fe63b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.606639] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1067.606639] env[62627]: value = "task-2194854" [ 1067.606639] env[62627]: _type = "Task" [ 1067.606639] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.615964] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194854, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.633262] env[62627]: DEBUG oslo_vmware.api [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': task-2194852, 'name': PowerOnVM_Task, 'duration_secs': 1.110255} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.633262] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 61f54696-1ce2-4515-ab9c-72347dbd9de0] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1067.633262] env[62627]: INFO nova.compute.manager [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 61f54696-1ce2-4515-ab9c-72347dbd9de0] Took 9.96 seconds to spawn the instance on the hypervisor. [ 1067.633262] env[62627]: DEBUG nova.compute.manager [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 61f54696-1ce2-4515-ab9c-72347dbd9de0] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1067.633806] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-702e7115-2079-4f87-b02e-bb7413c1b7e7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.654688] env[62627]: DEBUG oslo_vmware.api [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': task-2194853, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.167263} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.654688] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 5edf4b5b-93b1-4efc-80ec-f3a7009d5449] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1067.654688] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a464893-1ff3-4b0d-9094-f7715f434a5c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.685014] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 5edf4b5b-93b1-4efc-80ec-f3a7009d5449] Reconfiguring VM instance instance-0000004e to attach disk [datastore2] 5edf4b5b-93b1-4efc-80ec-f3a7009d5449/5edf4b5b-93b1-4efc-80ec-f3a7009d5449.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1067.685659] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c8da9e5c-5f89-4642-914a-9500a3ea156a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.706937] env[62627]: DEBUG oslo_vmware.api [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Waiting for the task: (returnval){ [ 1067.706937] env[62627]: value = "task-2194855" [ 
1067.706937] env[62627]: _type = "Task" [ 1067.706937] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.716283] env[62627]: DEBUG oslo_vmware.api [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': task-2194855, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.750029] env[62627]: DEBUG nova.compute.manager [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Start spawning the instance on the hypervisor. {{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1067.784857] env[62627]: DEBUG nova.virt.hardware [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1067.785134] env[62627]: DEBUG nova.virt.hardware [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1067.785316] env[62627]: DEBUG nova.virt.hardware [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1067.785508] env[62627]: DEBUG nova.virt.hardware [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1067.785654] env[62627]: DEBUG nova.virt.hardware [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1067.785796] env[62627]: DEBUG nova.virt.hardware [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 
{{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1067.786036] env[62627]: DEBUG nova.virt.hardware [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1067.786199] env[62627]: DEBUG nova.virt.hardware [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1067.786359] env[62627]: DEBUG nova.virt.hardware [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1067.786516] env[62627]: DEBUG nova.virt.hardware [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1067.786680] env[62627]: DEBUG nova.virt.hardware [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1067.787577] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-474176fa-655d-480d-8c04-da8dcda30dbc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.797346] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56b19651-385b-4afb-8c0e-bc0732f1f79e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.069750] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ac005499-9a90-4f39-a221-040b29afc0d4 tempest-InstanceActionsTestJSON-675212662 tempest-InstanceActionsTestJSON-675212662-project-member] Lock "f8003eee-248c-4005-8465-6e3e96f98f88" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.110s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1068.117984] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194854, 'name': CreateVM_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.155058] env[62627]: INFO nova.compute.manager [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 61f54696-1ce2-4515-ab9c-72347dbd9de0] Took 21.32 seconds to build instance. 
[ 1068.218733] env[62627]: DEBUG oslo_vmware.api [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': task-2194855, 'name': ReconfigVM_Task, 'duration_secs': 0.318945} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.218957] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 5edf4b5b-93b1-4efc-80ec-f3a7009d5449] Reconfigured VM instance instance-0000004e to attach disk [datastore2] 5edf4b5b-93b1-4efc-80ec-f3a7009d5449/5edf4b5b-93b1-4efc-80ec-f3a7009d5449.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1068.219815] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-274c8b47-d7ff-492f-8263-ed003cf42b62 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.229758] env[62627]: DEBUG oslo_vmware.api [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Waiting for the task: (returnval){ [ 1068.229758] env[62627]: value = "task-2194856" [ 1068.229758] env[62627]: _type = "Task" [ 1068.229758] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.239086] env[62627]: DEBUG oslo_vmware.api [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': task-2194856, 'name': Rename_Task} progress is 6%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.311074] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbd26838-a682-4bf0-90de-297796f09f0b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.323452] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4bf5bd0-1349-4270-ac9b-395c89f00254 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.362902] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c63a079-e868-46e8-b0fd-d14aceb8ef37 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.370514] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3725d2f-e42b-4c8b-a91d-db92a02d8303 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.385504] env[62627]: DEBUG nova.compute.provider_tree [None req-d524044e-d080-4f77-bf4d-2445d726bf84 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1068.620616] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194854, 'name': CreateVM_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.656414] env[62627]: DEBUG oslo_concurrency.lockutils [None req-14386c65-ed36-408b-9f8e-0907dfa46aaa tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Lock "61f54696-1ce2-4515-ab9c-72347dbd9de0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.827s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1068.741183] env[62627]: DEBUG oslo_vmware.api [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': task-2194856, 'name': Rename_Task, 'duration_secs': 0.360578} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.742151] env[62627]: DEBUG nova.network.neutron [req-13e9762e-bc11-4bf1-a683-da4fe9a761df req-f3a3f4fe-1dca-42cb-adc6-bc7dd3dd24ba service nova] [instance: 3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7] Updated VIF entry in instance network info cache for port 8170eace-a3a9-4b61-8d61-10d08aa65847. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1068.742500] env[62627]: DEBUG nova.network.neutron [req-13e9762e-bc11-4bf1-a683-da4fe9a761df req-f3a3f4fe-1dca-42cb-adc6-bc7dd3dd24ba service nova] [instance: 3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7] Updating instance_info_cache with network_info: [{"id": "8170eace-a3a9-4b61-8d61-10d08aa65847", "address": "fa:16:3e:14:e3:69", "network": {"id": "7c6d5a35-da58-4a44-83d0-20feb994e5ab", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-709440062-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c9263b20f714274a55b88a8bdbd5251", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "99be9a5e-b3f9-4e6c-83d5-df11f817847d", "external-id": "nsx-vlan-transportzone-566", "segmentation_id": 566, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8170eace-a3", "ovs_interfaceid": "8170eace-a3a9-4b61-8d61-10d08aa65847", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1068.743642] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 5edf4b5b-93b1-4efc-80ec-f3a7009d5449] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1068.747592] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bddf4486-d5a8-4f62-b620-b8db989f640d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.754610] env[62627]: DEBUG oslo_vmware.api [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Waiting for the task: (returnval){ [ 1068.754610] env[62627]: value = "task-2194857" [ 1068.754610] env[62627]: _type = "Task" [ 1068.754610] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.767231] env[62627]: DEBUG oslo_vmware.api [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': task-2194857, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.815984] env[62627]: DEBUG nova.network.neutron [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Successfully updated port: c77ae6ec-d1ea-4f56-b856-910a5835bc0d {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1068.889775] env[62627]: DEBUG nova.scheduler.client.report [None req-d524044e-d080-4f77-bf4d-2445d726bf84 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1069.122285] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194854, 'name': CreateVM_Task, 'duration_secs': 1.334774} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.122477] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1069.124149] env[62627]: DEBUG oslo_concurrency.lockutils [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1069.124149] env[62627]: DEBUG oslo_concurrency.lockutils [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1069.124149] env[62627]: DEBUG oslo_concurrency.lockutils [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1069.124149] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a87419c5-7b2c-4aa0-920b-a9ed5cbff9c0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.131767] env[62627]: DEBUG oslo_vmware.api [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 1069.131767] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]526b6721-0368-1444-2547-57e9f1d7ac10" [ 
1069.131767] env[62627]: _type = "Task" [ 1069.131767] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.141774] env[62627]: DEBUG oslo_vmware.api [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]526b6721-0368-1444-2547-57e9f1d7ac10, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.245507] env[62627]: DEBUG oslo_concurrency.lockutils [req-13e9762e-bc11-4bf1-a683-da4fe9a761df req-f3a3f4fe-1dca-42cb-adc6-bc7dd3dd24ba service nova] Releasing lock "refresh_cache-3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1069.269295] env[62627]: DEBUG oslo_vmware.api [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': task-2194857, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.324528] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquiring lock "refresh_cache-e71794b9-22eb-4643-8587-f12a198024a4" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1069.324528] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquired lock "refresh_cache-e71794b9-22eb-4643-8587-f12a198024a4" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1069.324528] env[62627]: DEBUG nova.network.neutron [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1069.397086] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d524044e-d080-4f77-bf4d-2445d726bf84 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.864s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1069.422498] env[62627]: INFO nova.scheduler.client.report [None req-d524044e-d080-4f77-bf4d-2445d726bf84 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Deleted allocations for instance e263159d-e9ff-4a19-a29a-cb06eaf2d679 [ 1069.514918] env[62627]: DEBUG nova.compute.manager [req-1d708914-28ad-4b2e-a2cd-9e87ab873d85 req-17e697c9-5025-40fe-92b8-62889abdf153 service nova] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Received event network-vif-plugged-c77ae6ec-d1ea-4f56-b856-910a5835bc0d {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1069.515110] env[62627]: DEBUG oslo_concurrency.lockutils 
[req-1d708914-28ad-4b2e-a2cd-9e87ab873d85 req-17e697c9-5025-40fe-92b8-62889abdf153 service nova] Acquiring lock "e71794b9-22eb-4643-8587-f12a198024a4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1069.515328] env[62627]: DEBUG oslo_concurrency.lockutils [req-1d708914-28ad-4b2e-a2cd-9e87ab873d85 req-17e697c9-5025-40fe-92b8-62889abdf153 service nova] Lock "e71794b9-22eb-4643-8587-f12a198024a4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1069.515526] env[62627]: DEBUG oslo_concurrency.lockutils [req-1d708914-28ad-4b2e-a2cd-9e87ab873d85 req-17e697c9-5025-40fe-92b8-62889abdf153 service nova] Lock "e71794b9-22eb-4643-8587-f12a198024a4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1069.515661] env[62627]: DEBUG nova.compute.manager [req-1d708914-28ad-4b2e-a2cd-9e87ab873d85 req-17e697c9-5025-40fe-92b8-62889abdf153 service nova] [instance: e71794b9-22eb-4643-8587-f12a198024a4] No waiting events found dispatching network-vif-plugged-c77ae6ec-d1ea-4f56-b856-910a5835bc0d {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1069.515824] env[62627]: WARNING nova.compute.manager [req-1d708914-28ad-4b2e-a2cd-9e87ab873d85 req-17e697c9-5025-40fe-92b8-62889abdf153 service nova] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Received unexpected event network-vif-plugged-c77ae6ec-d1ea-4f56-b856-910a5835bc0d for instance with vm_state building and task_state spawning. [ 1069.516806] env[62627]: DEBUG nova.compute.manager [req-1d708914-28ad-4b2e-a2cd-9e87ab873d85 req-17e697c9-5025-40fe-92b8-62889abdf153 service nova] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Received event network-changed-c77ae6ec-d1ea-4f56-b856-910a5835bc0d {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1069.517062] env[62627]: DEBUG nova.compute.manager [req-1d708914-28ad-4b2e-a2cd-9e87ab873d85 req-17e697c9-5025-40fe-92b8-62889abdf153 service nova] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Refreshing instance network info cache due to event network-changed-c77ae6ec-d1ea-4f56-b856-910a5835bc0d. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1069.517249] env[62627]: DEBUG oslo_concurrency.lockutils [req-1d708914-28ad-4b2e-a2cd-9e87ab873d85 req-17e697c9-5025-40fe-92b8-62889abdf153 service nova] Acquiring lock "refresh_cache-e71794b9-22eb-4643-8587-f12a198024a4" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1069.644183] env[62627]: DEBUG oslo_vmware.api [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]526b6721-0368-1444-2547-57e9f1d7ac10, 'name': SearchDatastore_Task, 'duration_secs': 0.011492} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.644793] env[62627]: DEBUG oslo_concurrency.lockutils [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1069.644793] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1069.644975] env[62627]: DEBUG oslo_concurrency.lockutils [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1069.645141] env[62627]: DEBUG oslo_concurrency.lockutils [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1069.645350] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1069.645603] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0961fc99-bda8-43fc-bf10-884dbcde4387 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.660346] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1069.660346] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1069.660346] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-df7ea598-6ec8-4bf2-801a-b6a789b3707a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.665963] env[62627]: DEBUG oslo_vmware.api [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 1069.665963] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52ef2961-3e70-c2b0-1564-926bdb12ecff" [ 1069.665963] env[62627]: _type = "Task" [ 1069.665963] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.676358] env[62627]: DEBUG oslo_vmware.api [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52ef2961-3e70-c2b0-1564-926bdb12ecff, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.764974] env[62627]: DEBUG oslo_vmware.api [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': task-2194857, 'name': PowerOnVM_Task, 'duration_secs': 0.615048} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.765282] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 5edf4b5b-93b1-4efc-80ec-f3a7009d5449] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1069.765505] env[62627]: INFO nova.compute.manager [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 5edf4b5b-93b1-4efc-80ec-f3a7009d5449] Took 9.69 seconds to spawn the instance on the hypervisor. [ 1069.765747] env[62627]: DEBUG nova.compute.manager [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 5edf4b5b-93b1-4efc-80ec-f3a7009d5449] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1069.766589] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5369a1d-7213-4ee3-94f6-693b43adee73 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.874940] env[62627]: DEBUG nova.network.neutron [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Instance cache missing network info. 
{{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1069.932122] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d524044e-d080-4f77-bf4d-2445d726bf84 tempest-ImagesTestJSON-765150101 tempest-ImagesTestJSON-765150101-project-member] Lock "e263159d-e9ff-4a19-a29a-cb06eaf2d679" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.620s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1070.150023] env[62627]: DEBUG nova.network.neutron [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Updating instance_info_cache with network_info: [{"id": "c77ae6ec-d1ea-4f56-b856-910a5835bc0d", "address": "fa:16:3e:e5:80:a2", "network": {"id": "1e37275e-31eb-4a79-8ff1-1b105d16fe3d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-59116622-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7a97f57bdf4a46db80460b1d5c9f9b9f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f499bc9-78da-46c1-9274-19edf26d31cb", "external-id": "nsx-vlan-transportzone-243", "segmentation_id": 243, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc77ae6ec-d1", "ovs_interfaceid": "c77ae6ec-d1ea-4f56-b856-910a5835bc0d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1070.177300] env[62627]: DEBUG oslo_vmware.api [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52ef2961-3e70-c2b0-1564-926bdb12ecff, 'name': SearchDatastore_Task, 'duration_secs': 0.016593} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1070.178108] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d180f33d-8f11-47c7-ab29-08ce40ae04bb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.184237] env[62627]: DEBUG oslo_vmware.api [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 1070.184237] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]523fcaed-c71d-df55-dee2-9e5a4e5ab53d" [ 1070.184237] env[62627]: _type = "Task" [ 1070.184237] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.192159] env[62627]: DEBUG oslo_vmware.api [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]523fcaed-c71d-df55-dee2-9e5a4e5ab53d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.300207] env[62627]: INFO nova.compute.manager [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 5edf4b5b-93b1-4efc-80ec-f3a7009d5449] Took 23.20 seconds to build instance. [ 1070.652964] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Releasing lock "refresh_cache-e71794b9-22eb-4643-8587-f12a198024a4" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1070.652964] env[62627]: DEBUG nova.compute.manager [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Instance network_info: |[{"id": "c77ae6ec-d1ea-4f56-b856-910a5835bc0d", "address": "fa:16:3e:e5:80:a2", "network": {"id": "1e37275e-31eb-4a79-8ff1-1b105d16fe3d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-59116622-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7a97f57bdf4a46db80460b1d5c9f9b9f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f499bc9-78da-46c1-9274-19edf26d31cb", "external-id": "nsx-vlan-transportzone-243", "segmentation_id": 243, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc77ae6ec-d1", "ovs_interfaceid": "c77ae6ec-d1ea-4f56-b856-910a5835bc0d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1070.652964] env[62627]: DEBUG oslo_concurrency.lockutils [req-1d708914-28ad-4b2e-a2cd-9e87ab873d85 req-17e697c9-5025-40fe-92b8-62889abdf153 service nova] Acquired lock "refresh_cache-e71794b9-22eb-4643-8587-f12a198024a4" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1070.656834] env[62627]: DEBUG nova.network.neutron [req-1d708914-28ad-4b2e-a2cd-9e87ab873d85 req-17e697c9-5025-40fe-92b8-62889abdf153 service nova] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Refreshing network info cache for port c77ae6ec-d1ea-4f56-b856-910a5835bc0d {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1070.658212] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 
tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e5:80:a2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3f499bc9-78da-46c1-9274-19edf26d31cb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c77ae6ec-d1ea-4f56-b856-910a5835bc0d', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1070.667207] env[62627]: DEBUG oslo.service.loopingcall [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1070.667806] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1070.668062] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9662dd8d-8035-4905-9bf1-bef985ce1dea {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.691097] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1070.691097] env[62627]: value = "task-2194858" [ 1070.691097] env[62627]: _type = "Task" [ 1070.691097] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.694666] env[62627]: DEBUG oslo_vmware.api [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]523fcaed-c71d-df55-dee2-9e5a4e5ab53d, 'name': SearchDatastore_Task, 'duration_secs': 0.017622} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1070.697632] env[62627]: DEBUG oslo_concurrency.lockutils [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1070.697891] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7/3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1070.698877] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ea696971-f1e0-4689-aab9-9fcd3240735b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.706938] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194858, 'name': CreateVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.708629] env[62627]: DEBUG oslo_vmware.api [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 1070.708629] env[62627]: value = "task-2194859" [ 1070.708629] env[62627]: _type = "Task" [ 1070.708629] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.717133] env[62627]: DEBUG oslo_vmware.api [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194859, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.803648] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8edda547-9616-42a4-81e5-2a1eb35e4c72 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Lock "5edf4b5b-93b1-4efc-80ec-f3a7009d5449" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.710s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1071.165755] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquiring lock "78307469-ebbe-45bc-a146-eaa298e9b707" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1071.166114] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "78307469-ebbe-45bc-a146-eaa298e9b707" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1071.206046] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194858, 'name': CreateVM_Task, 'duration_secs': 0.427552} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.206244] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1071.207273] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1071.207622] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1071.208141] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1071.208281] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb2cc2e6-8e64-4b30-b4e4-bfd6db3b2d44 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.218441] env[62627]: 
DEBUG oslo_vmware.api [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1071.218441] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52b1a3f1-4ec2-74cd-4b88-f3a33dadf4db" [ 1071.218441] env[62627]: _type = "Task" [ 1071.218441] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.224928] env[62627]: DEBUG oslo_vmware.api [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194859, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.514092} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.226052] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7/3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1071.226283] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1071.226534] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f6b7c758-2fda-41a4-bdd8-6daef88d7c73 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.231607] env[62627]: DEBUG oslo_vmware.api [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52b1a3f1-4ec2-74cd-4b88-f3a33dadf4db, 'name': SearchDatastore_Task, 'duration_secs': 0.012135} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.232494] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1071.232723] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1071.232953] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1071.233353] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1071.233353] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1071.233638] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1dcfa191-a922-419d-8027-976e20ace155 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.241184] env[62627]: DEBUG oslo_vmware.api [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 1071.241184] env[62627]: value = "task-2194860" [ 1071.241184] env[62627]: _type = "Task" [ 1071.241184] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.250048] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1071.250329] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1071.251868] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ebdd0878-ec57-4874-a801-3d34e8484b12 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.259393] env[62627]: DEBUG oslo_vmware.api [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194860, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.261260] env[62627]: DEBUG oslo_vmware.api [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1071.261260] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]525273a0-c04e-d195-46a6-c7533f041589" [ 1071.261260] env[62627]: _type = "Task" [ 1071.261260] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.269590] env[62627]: DEBUG oslo_vmware.api [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]525273a0-c04e-d195-46a6-c7533f041589, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.528099] env[62627]: DEBUG nova.network.neutron [req-1d708914-28ad-4b2e-a2cd-9e87ab873d85 req-17e697c9-5025-40fe-92b8-62889abdf153 service nova] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Updated VIF entry in instance network info cache for port c77ae6ec-d1ea-4f56-b856-910a5835bc0d. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1071.528480] env[62627]: DEBUG nova.network.neutron [req-1d708914-28ad-4b2e-a2cd-9e87ab873d85 req-17e697c9-5025-40fe-92b8-62889abdf153 service nova] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Updating instance_info_cache with network_info: [{"id": "c77ae6ec-d1ea-4f56-b856-910a5835bc0d", "address": "fa:16:3e:e5:80:a2", "network": {"id": "1e37275e-31eb-4a79-8ff1-1b105d16fe3d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-59116622-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7a97f57bdf4a46db80460b1d5c9f9b9f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f499bc9-78da-46c1-9274-19edf26d31cb", "external-id": "nsx-vlan-transportzone-243", "segmentation_id": 243, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc77ae6ec-d1", "ovs_interfaceid": "c77ae6ec-d1ea-4f56-b856-910a5835bc0d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1071.671024] env[62627]: DEBUG nova.compute.manager [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1071.754361] env[62627]: DEBUG oslo_vmware.api [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194860, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068279} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.754697] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1071.756126] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25d3835f-ed24-4670-a05f-969dc13449de {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.780061] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7] Reconfiguring VM instance instance-0000004f to attach disk [datastore2] 3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7/3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1071.783646] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-acaf903b-c412-4f66-aec7-09f718801eda {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.810389] env[62627]: DEBUG oslo_vmware.api [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]525273a0-c04e-d195-46a6-c7533f041589, 'name': SearchDatastore_Task, 'duration_secs': 0.011047} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.811269] env[62627]: DEBUG oslo_vmware.api [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 1071.811269] env[62627]: value = "task-2194861" [ 1071.811269] env[62627]: _type = "Task" [ 1071.811269] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.811510] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-704252d7-d9ee-4c7e-883a-8f2e9e6b7640 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.819265] env[62627]: DEBUG oslo_vmware.api [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1071.819265] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]526cd288-2faa-49fb-3cbd-278ff033d8ef" [ 1071.819265] env[62627]: _type = "Task" [ 1071.819265] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.833309] env[62627]: DEBUG oslo_vmware.api [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194861, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.838801] env[62627]: DEBUG oslo_vmware.api [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]526cd288-2faa-49fb-3cbd-278ff033d8ef, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.033936] env[62627]: DEBUG oslo_concurrency.lockutils [req-1d708914-28ad-4b2e-a2cd-9e87ab873d85 req-17e697c9-5025-40fe-92b8-62889abdf153 service nova] Releasing lock "refresh_cache-e71794b9-22eb-4643-8587-f12a198024a4" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1072.196144] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1072.196425] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1072.199239] env[62627]: INFO nova.compute.claims [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1072.326771] env[62627]: DEBUG oslo_vmware.api [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194861, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.332068] env[62627]: DEBUG oslo_vmware.api [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]526cd288-2faa-49fb-3cbd-278ff033d8ef, 'name': SearchDatastore_Task, 'duration_secs': 0.021986} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.332357] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1072.332663] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] e71794b9-22eb-4643-8587-f12a198024a4/e71794b9-22eb-4643-8587-f12a198024a4.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1072.332954] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a5f9455b-469d-42b5-8d39-0bf4dede09a0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.338911] env[62627]: DEBUG oslo_vmware.api [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1072.338911] env[62627]: value = "task-2194862" [ 1072.338911] env[62627]: _type = "Task" [ 1072.338911] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.346961] env[62627]: DEBUG oslo_vmware.api [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2194862, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.547204] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a6f07438-982b-4698-9100-3a61b65b7bd5 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Acquiring lock "e5a1fa80-6579-4334-9705-33a95dbc3797" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1072.547413] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a6f07438-982b-4698-9100-3a61b65b7bd5 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Lock "e5a1fa80-6579-4334-9705-33a95dbc3797" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1072.547544] env[62627]: DEBUG nova.compute.manager [None req-a6f07438-982b-4698-9100-3a61b65b7bd5 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: e5a1fa80-6579-4334-9705-33a95dbc3797] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1072.548471] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-536fdfc1-4c30-482a-a61a-3e1d4d1fa18c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.555333] env[62627]: DEBUG nova.compute.manager [None req-a6f07438-982b-4698-9100-3a61b65b7bd5 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: e5a1fa80-6579-4334-9705-33a95dbc3797] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62627) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1072.555847] env[62627]: DEBUG nova.objects.instance [None req-a6f07438-982b-4698-9100-3a61b65b7bd5 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Lazy-loading 'flavor' on Instance uuid e5a1fa80-6579-4334-9705-33a95dbc3797 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1072.824788] env[62627]: DEBUG oslo_vmware.api [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194861, 'name': ReconfigVM_Task, 'duration_secs': 0.605753} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.825126] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7] Reconfigured VM instance instance-0000004f to attach disk [datastore2] 3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7/3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1072.825676] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cf1db192-a9d0-4d37-831f-e01a49a511ae {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.832824] env[62627]: DEBUG oslo_vmware.api [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 1072.832824] env[62627]: value = "task-2194863" [ 1072.832824] env[62627]: _type = "Task" [ 1072.832824] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.841399] env[62627]: DEBUG oslo_vmware.api [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194863, 'name': Rename_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.848953] env[62627]: DEBUG oslo_vmware.api [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2194862, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.496141} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.849106] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] e71794b9-22eb-4643-8587-f12a198024a4/e71794b9-22eb-4643-8587-f12a198024a4.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1072.849274] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1072.849833] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5bfd50ef-3bb5-46bf-9523-ddc8949de967 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.856595] env[62627]: DEBUG oslo_vmware.api [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1072.856595] env[62627]: value = "task-2194864" [ 1072.856595] env[62627]: _type = "Task" [ 1072.856595] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.866322] env[62627]: DEBUG oslo_vmware.api [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2194864, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.341859] env[62627]: DEBUG oslo_vmware.api [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194863, 'name': Rename_Task, 'duration_secs': 0.181571} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.344591] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1073.345685] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1a9d224a-8959-47e2-b69e-545db2bea5db {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.351338] env[62627]: DEBUG oslo_vmware.api [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 1073.351338] env[62627]: value = "task-2194865" [ 1073.351338] env[62627]: _type = "Task" [ 1073.351338] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.364648] env[62627]: DEBUG oslo_vmware.api [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194865, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.367578] env[62627]: DEBUG oslo_vmware.api [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2194864, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065676} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.368084] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1073.368749] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16afad1c-4fd2-4250-81f3-502219f98c19 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.394097] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Reconfiguring VM instance instance-00000050 to attach disk [datastore2] e71794b9-22eb-4643-8587-f12a198024a4/e71794b9-22eb-4643-8587-f12a198024a4.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1073.396961] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c596732c-89ac-4689-bfa1-8cf0f742d922 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.415651] env[62627]: DEBUG oslo_vmware.api [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1073.415651] env[62627]: value = "task-2194866" [ 1073.415651] env[62627]: _type = "Task" [ 1073.415651] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.425533] env[62627]: DEBUG oslo_vmware.api [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2194866, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.470260] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8ce41cb-b668-451c-b03e-afe471c63a8b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.477647] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf3a2d7c-c5e4-4daf-864c-d5d68726b176 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.506708] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b3f34e7-7923-4439-acba-115266db9257 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.513771] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04805f75-2df2-4f98-a060-c44a3c677431 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.526557] env[62627]: DEBUG nova.compute.provider_tree [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1073.564332] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6f07438-982b-4698-9100-3a61b65b7bd5 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: e5a1fa80-6579-4334-9705-33a95dbc3797] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1073.564586] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-10953e19-395b-4896-9bda-e55b455809d3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.571480] env[62627]: DEBUG oslo_vmware.api [None req-a6f07438-982b-4698-9100-3a61b65b7bd5 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Waiting for the task: (returnval){ [ 1073.571480] env[62627]: value = "task-2194867" [ 1073.571480] env[62627]: _type = "Task" [ 1073.571480] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.581405] env[62627]: DEBUG oslo_vmware.api [None req-a6f07438-982b-4698-9100-3a61b65b7bd5 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': task-2194867, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.862932] env[62627]: DEBUG oslo_vmware.api [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194865, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.925137] env[62627]: DEBUG oslo_vmware.api [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2194866, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.029497] env[62627]: DEBUG nova.scheduler.client.report [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1074.080895] env[62627]: DEBUG oslo_vmware.api [None req-a6f07438-982b-4698-9100-3a61b65b7bd5 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': task-2194867, 'name': PowerOffVM_Task, 'duration_secs': 0.236939} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.081109] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6f07438-982b-4698-9100-3a61b65b7bd5 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: e5a1fa80-6579-4334-9705-33a95dbc3797] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1074.081314] env[62627]: DEBUG nova.compute.manager [None req-a6f07438-982b-4698-9100-3a61b65b7bd5 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: e5a1fa80-6579-4334-9705-33a95dbc3797] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1074.082080] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fff9f49-14dc-49a8-a61b-ed5b68387ff2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.362273] env[62627]: DEBUG oslo_vmware.api [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194865, 'name': PowerOnVM_Task} progress is 92%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.427170] env[62627]: DEBUG oslo_vmware.api [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2194866, 'name': ReconfigVM_Task, 'duration_secs': 0.944891} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.427477] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Reconfigured VM instance instance-00000050 to attach disk [datastore2] e71794b9-22eb-4643-8587-f12a198024a4/e71794b9-22eb-4643-8587-f12a198024a4.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1074.428125] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5b4b6d43-f292-45cb-b77d-8c11a9b40460 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.435031] env[62627]: DEBUG oslo_vmware.api [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1074.435031] env[62627]: value = "task-2194868" [ 1074.435031] env[62627]: _type = "Task" [ 1074.435031] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.447401] env[62627]: DEBUG oslo_vmware.api [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2194868, 'name': Rename_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.535010] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.338s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1074.535620] env[62627]: DEBUG nova.compute.manager [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1074.594222] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a6f07438-982b-4698-9100-3a61b65b7bd5 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Lock "e5a1fa80-6579-4334-9705-33a95dbc3797" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.047s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1074.862844] env[62627]: DEBUG oslo_vmware.api [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194865, 'name': PowerOnVM_Task, 'duration_secs': 1.128057} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.863140] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1074.863365] env[62627]: INFO nova.compute.manager [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7] Took 9.55 seconds to spawn the instance on the hypervisor. [ 1074.863531] env[62627]: DEBUG nova.compute.manager [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1074.864761] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90ee7bce-44ae-4cb8-88b8-522b8a71c2cc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.946256] env[62627]: DEBUG oslo_vmware.api [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2194868, 'name': Rename_Task, 'duration_secs': 0.227383} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.946546] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1074.946791] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-61765a28-56a9-479c-a7b9-f4b52d185db9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.954056] env[62627]: DEBUG oslo_vmware.api [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1074.954056] env[62627]: value = "task-2194869" [ 1074.954056] env[62627]: _type = "Task" [ 1074.954056] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.961683] env[62627]: DEBUG oslo_vmware.api [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2194869, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.041916] env[62627]: DEBUG nova.compute.utils [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1075.043647] env[62627]: DEBUG nova.compute.manager [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Allocating IP information in the background. {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1075.043647] env[62627]: DEBUG nova.network.neutron [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1075.093298] env[62627]: DEBUG nova.policy [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4289413aec40498dab5f6170d69eb39a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '74aed70194cf444884768a5c89fe8613', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 1075.122648] env[62627]: DEBUG nova.objects.instance [None req-dd5cf642-da80-4d56-b8cb-cfbd2cc8fee6 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Lazy-loading 'flavor' on Instance uuid e5a1fa80-6579-4334-9705-33a95dbc3797 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1075.383987] env[62627]: INFO nova.compute.manager [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7] Took 18.44 seconds to build instance. [ 1075.467374] env[62627]: DEBUG oslo_vmware.api [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2194869, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.518529] env[62627]: DEBUG nova.network.neutron [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Successfully created port: 75c54554-1d03-46ab-bb41-d84aca2543fe {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1075.535840] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Acquiring lock "aca062be-755d-4e41-b9ed-f28d2d3bf598" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1075.536088] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Lock "aca062be-755d-4e41-b9ed-f28d2d3bf598" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1075.547919] env[62627]: DEBUG nova.compute.manager [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1075.629736] env[62627]: DEBUG oslo_concurrency.lockutils [None req-dd5cf642-da80-4d56-b8cb-cfbd2cc8fee6 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Acquiring lock "refresh_cache-e5a1fa80-6579-4334-9705-33a95dbc3797" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1075.629884] env[62627]: DEBUG oslo_concurrency.lockutils [None req-dd5cf642-da80-4d56-b8cb-cfbd2cc8fee6 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Acquired lock "refresh_cache-e5a1fa80-6579-4334-9705-33a95dbc3797" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1075.630080] env[62627]: DEBUG nova.network.neutron [None req-dd5cf642-da80-4d56-b8cb-cfbd2cc8fee6 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: e5a1fa80-6579-4334-9705-33a95dbc3797] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1075.630299] env[62627]: DEBUG nova.objects.instance [None req-dd5cf642-da80-4d56-b8cb-cfbd2cc8fee6 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Lazy-loading 'info_cache' on Instance uuid e5a1fa80-6579-4334-9705-33a95dbc3797 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1075.864798] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4aa48f7f-62e9-41bf-a697-335a735e8494 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] 
Acquiring lock "3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1075.888525] env[62627]: DEBUG oslo_concurrency.lockutils [None req-550d9d6b-be14-4624-9114-6745a8738be6 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.948s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1075.888812] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4aa48f7f-62e9-41bf-a697-335a735e8494 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.024s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1075.888998] env[62627]: DEBUG nova.compute.manager [None req-4aa48f7f-62e9-41bf-a697-335a735e8494 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1075.889939] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faf906ed-5119-4c04-a612-9a046a3f10bd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.896711] env[62627]: DEBUG nova.compute.manager [None req-4aa48f7f-62e9-41bf-a697-335a735e8494 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62627) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1075.897420] env[62627]: DEBUG nova.objects.instance [None req-4aa48f7f-62e9-41bf-a697-335a735e8494 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lazy-loading 'flavor' on Instance uuid 3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1075.964699] env[62627]: DEBUG oslo_vmware.api [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2194869, 'name': PowerOnVM_Task, 'duration_secs': 0.814808} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.965424] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1075.965424] env[62627]: INFO nova.compute.manager [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Took 8.22 seconds to spawn the instance on the hypervisor. [ 1075.965424] env[62627]: DEBUG nova.compute.manager [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1075.966216] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32babb32-a443-4675-b090-c53b695b821b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.038441] env[62627]: DEBUG nova.compute.manager [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] [instance: aca062be-755d-4e41-b9ed-f28d2d3bf598] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1076.133770] env[62627]: DEBUG nova.objects.base [None req-dd5cf642-da80-4d56-b8cb-cfbd2cc8fee6 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=62627) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1076.482659] env[62627]: INFO nova.compute.manager [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Took 17.92 seconds to build instance. 
[ 1076.560263] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1076.560514] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1076.561997] env[62627]: INFO nova.compute.claims [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] [instance: aca062be-755d-4e41-b9ed-f28d2d3bf598] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1076.565618] env[62627]: DEBUG nova.compute.manager [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Start spawning the instance on the hypervisor. {{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1076.594016] env[62627]: DEBUG nova.virt.hardware [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1076.594273] env[62627]: DEBUG nova.virt.hardware [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1076.594431] env[62627]: DEBUG nova.virt.hardware [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1076.594824] env[62627]: DEBUG nova.virt.hardware [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1076.594986] env[62627]: DEBUG nova.virt.hardware [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1076.595146] env[62627]: DEBUG nova.virt.hardware [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1076.595524] env[62627]: DEBUG nova.virt.hardware [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1076.595693] env[62627]: DEBUG nova.virt.hardware [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1076.595884] env[62627]: DEBUG nova.virt.hardware [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1076.596086] env[62627]: DEBUG nova.virt.hardware [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1076.596278] env[62627]: DEBUG nova.virt.hardware [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1076.597184] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa00374e-7a95-4f54-9774-9e4f02227084 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.605810] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-735eada4-74d4-4e2f-a3f8-2ee9e1cc1f0d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.829596] env[62627]: DEBUG oslo_concurrency.lockutils [None req-56bca400-9596-4c6f-ab46-78c250b63e04 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquiring lock "e71794b9-22eb-4643-8587-f12a198024a4" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1076.906131] env[62627]: DEBUG 
nova.network.neutron [None req-dd5cf642-da80-4d56-b8cb-cfbd2cc8fee6 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: e5a1fa80-6579-4334-9705-33a95dbc3797] Updating instance_info_cache with network_info: [{"id": "6e993a7b-e7ba-4beb-ac82-cd2f2b111371", "address": "fa:16:3e:05:08:7e", "network": {"id": "d8124016-8788-4799-b1a6-b95e841f2a03", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-914036947-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d6324a68b7914b08847262f7773460cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c29724c-5452-441a-8060-5bf89d1f5847", "external-id": "nsx-vlan-transportzone-683", "segmentation_id": 683, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e993a7b-e7", "ovs_interfaceid": "6e993a7b-e7ba-4beb-ac82-cd2f2b111371", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1076.906131] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-4aa48f7f-62e9-41bf-a697-335a735e8494 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1076.906131] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-41e483c3-53c4-4f1b-b165-39091f20f6d2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.917033] env[62627]: DEBUG oslo_vmware.api [None req-4aa48f7f-62e9-41bf-a697-335a735e8494 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 1076.917033] env[62627]: value = "task-2194870" [ 1076.917033] env[62627]: _type = "Task" [ 1076.917033] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.928383] env[62627]: DEBUG oslo_vmware.api [None req-4aa48f7f-62e9-41bf-a697-335a735e8494 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194870, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.988289] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d53390a5-f5e1-45f7-bab1-a000465b0ab6 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "e71794b9-22eb-4643-8587-f12a198024a4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.431s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1076.988548] env[62627]: DEBUG oslo_concurrency.lockutils [None req-56bca400-9596-4c6f-ab46-78c250b63e04 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "e71794b9-22eb-4643-8587-f12a198024a4" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.159s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1076.988858] env[62627]: DEBUG nova.compute.manager [None req-56bca400-9596-4c6f-ab46-78c250b63e04 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1076.990763] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b993fe18-c576-4f74-8519-a59f16313f21 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.999624] env[62627]: DEBUG nova.compute.manager [None req-56bca400-9596-4c6f-ab46-78c250b63e04 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62627) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1077.000789] env[62627]: DEBUG nova.objects.instance [None req-56bca400-9596-4c6f-ab46-78c250b63e04 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lazy-loading 'flavor' on Instance uuid e71794b9-22eb-4643-8587-f12a198024a4 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1077.007562] env[62627]: DEBUG nova.compute.manager [req-98c31e73-79f6-49a7-93e6-35216c902cc9 req-e0e229e9-015a-46f1-a813-e0275fc406b6 service nova] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Received event network-vif-plugged-75c54554-1d03-46ab-bb41-d84aca2543fe {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1077.008997] env[62627]: DEBUG oslo_concurrency.lockutils [req-98c31e73-79f6-49a7-93e6-35216c902cc9 req-e0e229e9-015a-46f1-a813-e0275fc406b6 service nova] Acquiring lock "78307469-ebbe-45bc-a146-eaa298e9b707-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1077.010115] env[62627]: DEBUG oslo_concurrency.lockutils [req-98c31e73-79f6-49a7-93e6-35216c902cc9 req-e0e229e9-015a-46f1-a813-e0275fc406b6 service nova] Lock "78307469-ebbe-45bc-a146-eaa298e9b707-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1077.010646] env[62627]: DEBUG oslo_concurrency.lockutils [req-98c31e73-79f6-49a7-93e6-35216c902cc9 req-e0e229e9-015a-46f1-a813-e0275fc406b6 service nova] Lock "78307469-ebbe-45bc-a146-eaa298e9b707-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1077.010646] env[62627]: DEBUG nova.compute.manager [req-98c31e73-79f6-49a7-93e6-35216c902cc9 req-e0e229e9-015a-46f1-a813-e0275fc406b6 service nova] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] No waiting events found dispatching network-vif-plugged-75c54554-1d03-46ab-bb41-d84aca2543fe {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1077.010762] env[62627]: WARNING nova.compute.manager [req-98c31e73-79f6-49a7-93e6-35216c902cc9 req-e0e229e9-015a-46f1-a813-e0275fc406b6 service nova] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Received unexpected event network-vif-plugged-75c54554-1d03-46ab-bb41-d84aca2543fe for instance with vm_state building and task_state spawning. [ 1077.117032] env[62627]: DEBUG nova.network.neutron [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Successfully updated port: 75c54554-1d03-46ab-bb41-d84aca2543fe {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1077.406834] env[62627]: DEBUG oslo_concurrency.lockutils [None req-dd5cf642-da80-4d56-b8cb-cfbd2cc8fee6 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Releasing lock "refresh_cache-e5a1fa80-6579-4334-9705-33a95dbc3797" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1077.429027] env[62627]: DEBUG oslo_vmware.api [None req-4aa48f7f-62e9-41bf-a697-335a735e8494 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194870, 'name': PowerOffVM_Task, 'duration_secs': 0.288256} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.429027] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-4aa48f7f-62e9-41bf-a697-335a735e8494 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1077.429027] env[62627]: DEBUG nova.compute.manager [None req-4aa48f7f-62e9-41bf-a697-335a735e8494 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1077.429590] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c299853d-c26f-4577-a201-bd09e30fff45 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.186619] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquiring lock "refresh_cache-78307469-ebbe-45bc-a146-eaa298e9b707" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1078.186932] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquired lock "refresh_cache-78307469-ebbe-45bc-a146-eaa298e9b707" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1078.186932] env[62627]: DEBUG nova.network.neutron [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1078.192170] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4aa48f7f-62e9-41bf-a697-335a735e8494 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.303s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1078.412214] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-830981ac-e0be-421a-a0ce-c262ce08c342 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.419983] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73ce406c-9937-4a59-bcaa-5dc17f560b36 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.450552] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10b50210-5abc-4415-8115-1f0789565d0c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.458550] env[62627]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70c3320e-fd83-4a15-a3c6-235a175eec26 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.472133] env[62627]: DEBUG nova.compute.provider_tree [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1078.694402] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd5cf642-da80-4d56-b8cb-cfbd2cc8fee6 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: e5a1fa80-6579-4334-9705-33a95dbc3797] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1078.698776] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5a5410e6-5bef-44c5-b4ab-3ccba37894e1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.700129] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8fa51ddd-9cc5-4d9e-b522-0065a6986d81 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquiring lock "3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1078.700356] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8fa51ddd-9cc5-4d9e-b522-0065a6986d81 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1078.700545] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8fa51ddd-9cc5-4d9e-b522-0065a6986d81 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquiring lock "3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1078.700749] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8fa51ddd-9cc5-4d9e-b522-0065a6986d81 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1078.700887] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8fa51ddd-9cc5-4d9e-b522-0065a6986d81 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1078.702632] 
env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-56bca400-9596-4c6f-ab46-78c250b63e04 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1078.703118] env[62627]: INFO nova.compute.manager [None req-8fa51ddd-9cc5-4d9e-b522-0065a6986d81 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7] Terminating instance [ 1078.704714] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ebef8945-a825-4adc-bfd7-a8e9b9a40500 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.712951] env[62627]: DEBUG oslo_vmware.api [None req-dd5cf642-da80-4d56-b8cb-cfbd2cc8fee6 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Waiting for the task: (returnval){ [ 1078.712951] env[62627]: value = "task-2194871" [ 1078.712951] env[62627]: _type = "Task" [ 1078.712951] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.713972] env[62627]: DEBUG oslo_vmware.api [None req-56bca400-9596-4c6f-ab46-78c250b63e04 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1078.713972] env[62627]: value = "task-2194872" [ 1078.713972] env[62627]: _type = "Task" [ 1078.713972] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.727309] env[62627]: DEBUG oslo_vmware.api [None req-dd5cf642-da80-4d56-b8cb-cfbd2cc8fee6 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': task-2194871, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.729720] env[62627]: DEBUG oslo_vmware.api [None req-56bca400-9596-4c6f-ab46-78c250b63e04 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2194872, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.739533] env[62627]: DEBUG nova.network.neutron [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Instance cache missing network info. 
{{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1078.884071] env[62627]: DEBUG nova.network.neutron [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Updating instance_info_cache with network_info: [{"id": "75c54554-1d03-46ab-bb41-d84aca2543fe", "address": "fa:16:3e:f3:f0:a9", "network": {"id": "f5de376b-cb15-4145-90b2-664ca4fa638e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1451402594-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74aed70194cf444884768a5c89fe8613", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "098df9b7-d759-47f7-b756-334848cb423b", "external-id": "nsx-vlan-transportzone-765", "segmentation_id": 765, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap75c54554-1d", "ovs_interfaceid": "75c54554-1d03-46ab-bb41-d84aca2543fe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1078.975423] env[62627]: DEBUG nova.scheduler.client.report [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1079.028796] env[62627]: DEBUG nova.compute.manager [req-e01d5470-ec81-4b83-a621-34fa81e07ed0 req-1fb616f9-8311-4ac2-b532-2774f614bf2c service nova] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Received event network-changed-75c54554-1d03-46ab-bb41-d84aca2543fe {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1079.028796] env[62627]: DEBUG nova.compute.manager [req-e01d5470-ec81-4b83-a621-34fa81e07ed0 req-1fb616f9-8311-4ac2-b532-2774f614bf2c service nova] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Refreshing instance network info cache due to event network-changed-75c54554-1d03-46ab-bb41-d84aca2543fe. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1079.028796] env[62627]: DEBUG oslo_concurrency.lockutils [req-e01d5470-ec81-4b83-a621-34fa81e07ed0 req-1fb616f9-8311-4ac2-b532-2774f614bf2c service nova] Acquiring lock "refresh_cache-78307469-ebbe-45bc-a146-eaa298e9b707" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1079.213059] env[62627]: DEBUG nova.compute.manager [None req-8fa51ddd-9cc5-4d9e-b522-0065a6986d81 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7] Start destroying the instance on the hypervisor. {{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1079.213059] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8fa51ddd-9cc5-4d9e-b522-0065a6986d81 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1079.213394] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a377d43-eef1-4751-9e59-036b41e24359 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.224253] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8fa51ddd-9cc5-4d9e-b522-0065a6986d81 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1079.227345] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8d2b6992-4983-49be-9091-8365cf462e9f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.228596] env[62627]: DEBUG oslo_vmware.api [None req-dd5cf642-da80-4d56-b8cb-cfbd2cc8fee6 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': task-2194871, 'name': PowerOnVM_Task, 'duration_secs': 0.457831} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.228848] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd5cf642-da80-4d56-b8cb-cfbd2cc8fee6 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: e5a1fa80-6579-4334-9705-33a95dbc3797] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1079.229063] env[62627]: DEBUG nova.compute.manager [None req-dd5cf642-da80-4d56-b8cb-cfbd2cc8fee6 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: e5a1fa80-6579-4334-9705-33a95dbc3797] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1079.232432] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2bdc942-3133-4b83-ada7-fe4a6a8a3b73 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.234599] env[62627]: DEBUG oslo_vmware.api [None req-56bca400-9596-4c6f-ab46-78c250b63e04 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2194872, 'name': PowerOffVM_Task, 'duration_secs': 0.170131} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.234829] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-56bca400-9596-4c6f-ab46-78c250b63e04 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1079.235016] env[62627]: DEBUG nova.compute.manager [None req-56bca400-9596-4c6f-ab46-78c250b63e04 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1079.235925] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-050e3802-107c-4470-9c47-380c9041e368 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.298958] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8fa51ddd-9cc5-4d9e-b522-0065a6986d81 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1079.299288] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8fa51ddd-9cc5-4d9e-b522-0065a6986d81 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1079.299732] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-8fa51ddd-9cc5-4d9e-b522-0065a6986d81 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Deleting the datastore file [datastore2] 3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7 {{(pid=62627) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1079.299992] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b041aeaf-5cc1-48c8-a314-48c32283df04 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.306937] env[62627]: DEBUG oslo_vmware.api [None req-8fa51ddd-9cc5-4d9e-b522-0065a6986d81 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 1079.306937] env[62627]: value = "task-2194874" [ 1079.306937] env[62627]: _type = "Task" [ 1079.306937] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.314926] env[62627]: DEBUG oslo_vmware.api [None req-8fa51ddd-9cc5-4d9e-b522-0065a6986d81 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194874, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.387642] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Releasing lock "refresh_cache-78307469-ebbe-45bc-a146-eaa298e9b707" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1079.388164] env[62627]: DEBUG nova.compute.manager [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Instance network_info: |[{"id": "75c54554-1d03-46ab-bb41-d84aca2543fe", "address": "fa:16:3e:f3:f0:a9", "network": {"id": "f5de376b-cb15-4145-90b2-664ca4fa638e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1451402594-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74aed70194cf444884768a5c89fe8613", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "098df9b7-d759-47f7-b756-334848cb423b", "external-id": "nsx-vlan-transportzone-765", "segmentation_id": 765, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap75c54554-1d", "ovs_interfaceid": "75c54554-1d03-46ab-bb41-d84aca2543fe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1079.388567] env[62627]: DEBUG oslo_concurrency.lockutils [req-e01d5470-ec81-4b83-a621-34fa81e07ed0 req-1fb616f9-8311-4ac2-b532-2774f614bf2c service nova] Acquired lock "refresh_cache-78307469-ebbe-45bc-a146-eaa298e9b707" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1079.388764] env[62627]: DEBUG nova.network.neutron [req-e01d5470-ec81-4b83-a621-34fa81e07ed0 
req-1fb616f9-8311-4ac2-b532-2774f614bf2c service nova] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Refreshing network info cache for port 75c54554-1d03-46ab-bb41-d84aca2543fe {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1079.390319] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f3:f0:a9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '098df9b7-d759-47f7-b756-334848cb423b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '75c54554-1d03-46ab-bb41-d84aca2543fe', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1079.400362] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Creating folder: Project (74aed70194cf444884768a5c89fe8613). Parent ref: group-v447541. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1079.401391] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7c541d06-c2d9-4613-978a-b1dcbc8497ac {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.413082] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Created folder: Project (74aed70194cf444884768a5c89fe8613) in parent group-v447541. [ 1079.413271] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Creating folder: Instances. Parent ref: group-v447756. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1079.413492] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-80804891-4c14-4bfa-b9a6-7327563dcaeb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.423099] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Created folder: Instances in parent group-v447756. [ 1079.423320] env[62627]: DEBUG oslo.service.loopingcall [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1079.423495] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1079.423679] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6e73ac47-ecc4-4425-acd2-8161a73708e6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.442303] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1079.442303] env[62627]: value = "task-2194877" [ 1079.442303] env[62627]: _type = "Task" [ 1079.442303] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.451477] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194877, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.480822] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.920s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1079.481352] env[62627]: DEBUG nova.compute.manager [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] [instance: aca062be-755d-4e41-b9ed-f28d2d3bf598] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1079.749735] env[62627]: DEBUG oslo_concurrency.lockutils [None req-56bca400-9596-4c6f-ab46-78c250b63e04 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "e71794b9-22eb-4643-8587-f12a198024a4" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.761s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1079.816826] env[62627]: DEBUG oslo_vmware.api [None req-8fa51ddd-9cc5-4d9e-b522-0065a6986d81 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194874, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.329529} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.817122] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-8fa51ddd-9cc5-4d9e-b522-0065a6986d81 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1079.817334] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8fa51ddd-9cc5-4d9e-b522-0065a6986d81 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1079.817508] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8fa51ddd-9cc5-4d9e-b522-0065a6986d81 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1079.817679] env[62627]: INFO nova.compute.manager [None req-8fa51ddd-9cc5-4d9e-b522-0065a6986d81 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7] Took 0.61 seconds to destroy the instance on the hypervisor. [ 1079.818229] env[62627]: DEBUG oslo.service.loopingcall [None req-8fa51ddd-9cc5-4d9e-b522-0065a6986d81 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1079.818229] env[62627]: DEBUG nova.compute.manager [-] [instance: 3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1079.818229] env[62627]: DEBUG nova.network.neutron [-] [instance: 3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1079.951956] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194877, 'name': CreateVM_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.988083] env[62627]: DEBUG nova.compute.utils [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1079.988083] env[62627]: DEBUG nova.compute.manager [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] [instance: aca062be-755d-4e41-b9ed-f28d2d3bf598] Allocating IP information in the background. 
{{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1079.988083] env[62627]: DEBUG nova.network.neutron [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] [instance: aca062be-755d-4e41-b9ed-f28d2d3bf598] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1080.039240] env[62627]: DEBUG nova.policy [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3fb0303b801b468a84350e2e01181757', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4c55337417dc48978dbf0aa775d3d76b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 1080.260370] env[62627]: DEBUG nova.network.neutron [req-e01d5470-ec81-4b83-a621-34fa81e07ed0 req-1fb616f9-8311-4ac2-b532-2774f614bf2c service nova] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Updated VIF entry in instance network info cache for port 75c54554-1d03-46ab-bb41-d84aca2543fe. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1080.260652] env[62627]: DEBUG nova.network.neutron [req-e01d5470-ec81-4b83-a621-34fa81e07ed0 req-1fb616f9-8311-4ac2-b532-2774f614bf2c service nova] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Updating instance_info_cache with network_info: [{"id": "75c54554-1d03-46ab-bb41-d84aca2543fe", "address": "fa:16:3e:f3:f0:a9", "network": {"id": "f5de376b-cb15-4145-90b2-664ca4fa638e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1451402594-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74aed70194cf444884768a5c89fe8613", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "098df9b7-d759-47f7-b756-334848cb423b", "external-id": "nsx-vlan-transportzone-765", "segmentation_id": 765, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap75c54554-1d", "ovs_interfaceid": "75c54554-1d03-46ab-bb41-d84aca2543fe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1080.370528] env[62627]: DEBUG nova.network.neutron [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] [instance: aca062be-755d-4e41-b9ed-f28d2d3bf598] Successfully created port: a3ecaa14-2705-45f1-a66a-eb481153f099 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1080.391549] env[62627]: INFO nova.compute.manager [None 
req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Rebuilding instance [ 1080.438573] env[62627]: DEBUG nova.compute.manager [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1080.439448] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0f1e15c-1917-411d-9be2-b291b26c4a32 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.459711] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194877, 'name': CreateVM_Task, 'duration_secs': 0.509596} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.460985] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1080.462199] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1080.462877] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1080.462877] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1080.463175] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-048dff01-840e-428f-808f-fc834394386f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.467639] env[62627]: DEBUG oslo_vmware.api [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1080.467639] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5281566a-c93c-db13-d982-fc0a4e8ace62" [ 1080.467639] env[62627]: _type = "Task" [ 1080.467639] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.475630] env[62627]: DEBUG oslo_vmware.api [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5281566a-c93c-db13-d982-fc0a4e8ace62, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.490383] env[62627]: DEBUG nova.compute.manager [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] [instance: aca062be-755d-4e41-b9ed-f28d2d3bf598] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1080.684924] env[62627]: DEBUG nova.network.neutron [-] [instance: 3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1080.762913] env[62627]: DEBUG oslo_concurrency.lockutils [req-e01d5470-ec81-4b83-a621-34fa81e07ed0 req-1fb616f9-8311-4ac2-b532-2774f614bf2c service nova] Releasing lock "refresh_cache-78307469-ebbe-45bc-a146-eaa298e9b707" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1080.980844] env[62627]: DEBUG oslo_vmware.api [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5281566a-c93c-db13-d982-fc0a4e8ace62, 'name': SearchDatastore_Task, 'duration_secs': 0.009336} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.981192] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1080.981447] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1080.981704] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1080.982762] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1080.982762] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1080.982762] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a860371a-f7f9-45b8-981d-b133e7449fe2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.991546] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1080.991744] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1080.992536] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-41cde53f-71f4-405c-98d1-b28f68dfe881 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.001962] env[62627]: DEBUG oslo_vmware.api [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1081.001962] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52e70ea2-b714-23a6-42e8-43fc109cd6ac" [ 1081.001962] env[62627]: _type = "Task" [ 1081.001962] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.010303] env[62627]: DEBUG oslo_vmware.api [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52e70ea2-b714-23a6-42e8-43fc109cd6ac, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.188105] env[62627]: INFO nova.compute.manager [-] [instance: 3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7] Took 1.37 seconds to deallocate network for instance. [ 1081.237532] env[62627]: DEBUG nova.compute.manager [req-4533c542-6654-4333-8828-15817811e1fa req-fe0b6810-0991-4259-bbf7-1a34e5a5e12e service nova] [instance: 3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7] Received event network-vif-deleted-8170eace-a3a9-4b61-8d61-10d08aa65847 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1081.465170] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1081.465576] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f5748853-4711-45b9-95f3-e036846934ea {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.473247] env[62627]: DEBUG oslo_vmware.api [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1081.473247] env[62627]: value = "task-2194878" [ 1081.473247] env[62627]: _type = "Task" [ 1081.473247] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.482241] env[62627]: DEBUG oslo_vmware.api [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2194878, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.500298] env[62627]: DEBUG nova.compute.manager [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] [instance: aca062be-755d-4e41-b9ed-f28d2d3bf598] Start spawning the instance on the hypervisor. {{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1081.512739] env[62627]: DEBUG oslo_vmware.api [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52e70ea2-b714-23a6-42e8-43fc109cd6ac, 'name': SearchDatastore_Task, 'duration_secs': 0.009156} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.513516] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cb870f52-c449-458a-9a11-708389e26155 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.519038] env[62627]: DEBUG oslo_vmware.api [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1081.519038] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]529b2e72-98a2-7662-024b-890c4fed7eaa" [ 1081.519038] env[62627]: _type = "Task" [ 1081.519038] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.527079] env[62627]: DEBUG nova.virt.hardware [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1081.527333] env[62627]: DEBUG nova.virt.hardware [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1081.527687] env[62627]: DEBUG nova.virt.hardware [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1081.528203] env[62627]: DEBUG nova.virt.hardware [None 
req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1081.528391] env[62627]: DEBUG nova.virt.hardware [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1081.528545] env[62627]: DEBUG nova.virt.hardware [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1081.529149] env[62627]: DEBUG nova.virt.hardware [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1081.529149] env[62627]: DEBUG nova.virt.hardware [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1081.529273] env[62627]: DEBUG nova.virt.hardware [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1081.529413] env[62627]: DEBUG nova.virt.hardware [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1081.529590] env[62627]: DEBUG nova.virt.hardware [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1081.530396] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1659433-7bff-47a6-a80b-01e38f198e98 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.536129] env[62627]: DEBUG oslo_vmware.api [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]529b2e72-98a2-7662-024b-890c4fed7eaa, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.541421] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb8da295-b1a0-4f40-9bea-e2d543f00d8e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.694605] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8fa51ddd-9cc5-4d9e-b522-0065a6986d81 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1081.694873] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8fa51ddd-9cc5-4d9e-b522-0065a6986d81 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1081.695110] env[62627]: DEBUG nova.objects.instance [None req-8fa51ddd-9cc5-4d9e-b522-0065a6986d81 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lazy-loading 'resources' on Instance uuid 3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1081.983758] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: e71794b9-22eb-4643-8587-f12a198024a4] VM already powered off {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1081.983889] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1081.984757] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c4f404f-07b2-4afc-a6a5-fe4505aa15da {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.993022] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1081.993298] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e9bcdb5c-d933-4a9c-852f-838820046ff6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.032484] env[62627]: DEBUG oslo_vmware.api [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]529b2e72-98a2-7662-024b-890c4fed7eaa, 'name': SearchDatastore_Task, 'duration_secs': 0.013894} completed 
successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.032798] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1082.033133] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 78307469-ebbe-45bc-a146-eaa298e9b707/78307469-ebbe-45bc-a146-eaa298e9b707.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1082.033443] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ae3a60ee-dffb-467f-a4d0-c3c48ef0af69 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.040545] env[62627]: DEBUG oslo_vmware.api [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1082.040545] env[62627]: value = "task-2194880" [ 1082.040545] env[62627]: _type = "Task" [ 1082.040545] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.050869] env[62627]: DEBUG oslo_vmware.api [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2194880, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.059018] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1082.059245] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1082.059421] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Deleting the datastore file [datastore2] e71794b9-22eb-4643-8587-f12a198024a4 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1082.059673] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-79281203-abcb-4d7d-9abc-72baf131f978 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.065581] env[62627]: DEBUG oslo_vmware.api [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1082.065581] env[62627]: value = "task-2194881" [ 1082.065581] env[62627]: _type = "Task" [ 1082.065581] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.074356] env[62627]: DEBUG oslo_vmware.api [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2194881, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.080514] env[62627]: DEBUG nova.compute.manager [req-0f5f881e-e445-48fb-9996-863d3fa8dd71 req-a3b2dc80-d690-4799-9623-b0501d957479 service nova] [instance: aca062be-755d-4e41-b9ed-f28d2d3bf598] Received event network-vif-plugged-a3ecaa14-2705-45f1-a66a-eb481153f099 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1082.080732] env[62627]: DEBUG oslo_concurrency.lockutils [req-0f5f881e-e445-48fb-9996-863d3fa8dd71 req-a3b2dc80-d690-4799-9623-b0501d957479 service nova] Acquiring lock "aca062be-755d-4e41-b9ed-f28d2d3bf598-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1082.080977] env[62627]: DEBUG oslo_concurrency.lockutils [req-0f5f881e-e445-48fb-9996-863d3fa8dd71 req-a3b2dc80-d690-4799-9623-b0501d957479 service nova] Lock "aca062be-755d-4e41-b9ed-f28d2d3bf598-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1082.081182] env[62627]: DEBUG oslo_concurrency.lockutils [req-0f5f881e-e445-48fb-9996-863d3fa8dd71 req-a3b2dc80-d690-4799-9623-b0501d957479 service nova] Lock "aca062be-755d-4e41-b9ed-f28d2d3bf598-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1082.081383] env[62627]: DEBUG nova.compute.manager [req-0f5f881e-e445-48fb-9996-863d3fa8dd71 req-a3b2dc80-d690-4799-9623-b0501d957479 service nova] [instance: aca062be-755d-4e41-b9ed-f28d2d3bf598] No waiting events found dispatching network-vif-plugged-a3ecaa14-2705-45f1-a66a-eb481153f099 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1082.081604] env[62627]: WARNING nova.compute.manager [req-0f5f881e-e445-48fb-9996-863d3fa8dd71 req-a3b2dc80-d690-4799-9623-b0501d957479 service nova] [instance: aca062be-755d-4e41-b9ed-f28d2d3bf598] Received unexpected event network-vif-plugged-a3ecaa14-2705-45f1-a66a-eb481153f099 for instance with vm_state building and task_state spawning. [ 1082.171898] env[62627]: DEBUG nova.network.neutron [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] [instance: aca062be-755d-4e41-b9ed-f28d2d3bf598] Successfully updated port: a3ecaa14-2705-45f1-a66a-eb481153f099 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1082.550727] env[62627]: DEBUG oslo_vmware.api [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2194880, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.552226] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dc85e5d-e013-4f97-ba37-6f412c6049e2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.558426] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-500a7201-00b6-4bb4-bbe6-ebe51d285b65 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.590456] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb2a53b1-1c4d-48d6-919a-b506679fce59 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.597445] env[62627]: DEBUG oslo_vmware.api [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2194881, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.267815} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.599471] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1082.599660] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1082.599833] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1082.603255] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cc9494f-f924-4bdb-b17f-6bc1fec91247 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.617712] env[62627]: DEBUG nova.compute.provider_tree [None req-8fa51ddd-9cc5-4d9e-b522-0065a6986d81 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1082.666372] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c2dcb77e-a8e6-41b8-83e5-1e10eb1d71f6 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Acquiring lock "5edf4b5b-93b1-4efc-80ec-f3a7009d5449" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1082.667059] env[62627]: DEBUG 
oslo_concurrency.lockutils [None req-c2dcb77e-a8e6-41b8-83e5-1e10eb1d71f6 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Lock "5edf4b5b-93b1-4efc-80ec-f3a7009d5449" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1082.667059] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c2dcb77e-a8e6-41b8-83e5-1e10eb1d71f6 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Acquiring lock "5edf4b5b-93b1-4efc-80ec-f3a7009d5449-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1082.667059] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c2dcb77e-a8e6-41b8-83e5-1e10eb1d71f6 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Lock "5edf4b5b-93b1-4efc-80ec-f3a7009d5449-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1082.667276] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c2dcb77e-a8e6-41b8-83e5-1e10eb1d71f6 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Lock "5edf4b5b-93b1-4efc-80ec-f3a7009d5449-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1082.669348] env[62627]: INFO nova.compute.manager [None req-c2dcb77e-a8e6-41b8-83e5-1e10eb1d71f6 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 5edf4b5b-93b1-4efc-80ec-f3a7009d5449] Terminating instance [ 1082.674816] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Acquiring lock "refresh_cache-aca062be-755d-4e41-b9ed-f28d2d3bf598" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1082.674816] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Acquired lock "refresh_cache-aca062be-755d-4e41-b9ed-f28d2d3bf598" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1082.674816] env[62627]: DEBUG nova.network.neutron [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] [instance: aca062be-755d-4e41-b9ed-f28d2d3bf598] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1083.051741] env[62627]: DEBUG oslo_vmware.api [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2194880, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.532808} completed 
successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.052056] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 78307469-ebbe-45bc-a146-eaa298e9b707/78307469-ebbe-45bc-a146-eaa298e9b707.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1083.052320] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1083.052581] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-df945346-df34-4d6f-90c3-3db5713bbd75 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.058591] env[62627]: DEBUG oslo_vmware.api [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1083.058591] env[62627]: value = "task-2194882" [ 1083.058591] env[62627]: _type = "Task" [ 1083.058591] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.065545] env[62627]: DEBUG oslo_vmware.api [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2194882, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.120337] env[62627]: DEBUG nova.scheduler.client.report [None req-8fa51ddd-9cc5-4d9e-b522-0065a6986d81 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1083.173339] env[62627]: DEBUG nova.compute.manager [None req-c2dcb77e-a8e6-41b8-83e5-1e10eb1d71f6 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 5edf4b5b-93b1-4efc-80ec-f3a7009d5449] Start destroying the instance on the hypervisor. 
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1083.173517] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c2dcb77e-a8e6-41b8-83e5-1e10eb1d71f6 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 5edf4b5b-93b1-4efc-80ec-f3a7009d5449] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1083.174441] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf5a26cd-3f15-42c8-934e-a1f0bc6c5c5e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.184062] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2dcb77e-a8e6-41b8-83e5-1e10eb1d71f6 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 5edf4b5b-93b1-4efc-80ec-f3a7009d5449] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1083.184325] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ff8c3cfb-94d3-494f-9158-ae02068fc092 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.191563] env[62627]: DEBUG oslo_vmware.api [None req-c2dcb77e-a8e6-41b8-83e5-1e10eb1d71f6 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Waiting for the task: (returnval){ [ 1083.191563] env[62627]: value = "task-2194883" [ 1083.191563] env[62627]: _type = "Task" [ 1083.191563] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.199544] env[62627]: DEBUG oslo_vmware.api [None req-c2dcb77e-a8e6-41b8-83e5-1e10eb1d71f6 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': task-2194883, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.207127] env[62627]: DEBUG nova.network.neutron [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] [instance: aca062be-755d-4e41-b9ed-f28d2d3bf598] Instance cache missing network info. 
{{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1083.345519] env[62627]: DEBUG nova.network.neutron [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] [instance: aca062be-755d-4e41-b9ed-f28d2d3bf598] Updating instance_info_cache with network_info: [{"id": "a3ecaa14-2705-45f1-a66a-eb481153f099", "address": "fa:16:3e:b8:97:5a", "network": {"id": "053f804e-7607-49fd-a528-125b6a08a6ae", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1914615995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4c55337417dc48978dbf0aa775d3d76b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa3ecaa14-27", "ovs_interfaceid": "a3ecaa14-2705-45f1-a66a-eb481153f099", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1083.568354] env[62627]: DEBUG oslo_vmware.api [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2194882, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.087065} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.568637] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1083.569396] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cbdb08a-7500-43e7-b70f-6e58ccdbd2a2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.590712] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Reconfiguring VM instance instance-00000051 to attach disk [datastore2] 78307469-ebbe-45bc-a146-eaa298e9b707/78307469-ebbe-45bc-a146-eaa298e9b707.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1083.591009] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-728f604b-cf8d-4094-82b7-4a8c5b1afdff {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.610295] env[62627]: DEBUG oslo_vmware.api [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1083.610295] env[62627]: value = "task-2194884" [ 1083.610295] env[62627]: _type = "Task" [ 1083.610295] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.621207] env[62627]: DEBUG oslo_vmware.api [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2194884, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.624990] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8fa51ddd-9cc5-4d9e-b522-0065a6986d81 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.930s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1083.640894] env[62627]: DEBUG nova.virt.hardware [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1083.641155] env[62627]: DEBUG nova.virt.hardware [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1083.641316] env[62627]: DEBUG nova.virt.hardware [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1083.641497] env[62627]: DEBUG nova.virt.hardware [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1083.641642] env[62627]: DEBUG nova.virt.hardware [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1083.641786] env[62627]: DEBUG nova.virt.hardware [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1083.641986] env[62627]: DEBUG nova.virt.hardware [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) 
{{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1083.642193] env[62627]: DEBUG nova.virt.hardware [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1083.642379] env[62627]: DEBUG nova.virt.hardware [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1083.642544] env[62627]: DEBUG nova.virt.hardware [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1083.642714] env[62627]: DEBUG nova.virt.hardware [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1083.643584] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-404431f8-2ab0-4318-a0fe-54a450fe1379 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.647772] env[62627]: INFO nova.scheduler.client.report [None req-8fa51ddd-9cc5-4d9e-b522-0065a6986d81 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Deleted allocations for instance 3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7 [ 1083.656895] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93193cca-c418-4f02-8a1a-9e42f3d798af {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.672925] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e5:80:a2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3f499bc9-78da-46c1-9274-19edf26d31cb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c77ae6ec-d1ea-4f56-b856-910a5835bc0d', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1083.681118] env[62627]: DEBUG oslo.service.loopingcall [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1083.681368] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1083.681600] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-af27c19a-a99b-481f-8cbf-52ea2c1a7ad5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.704828] env[62627]: DEBUG oslo_vmware.api [None req-c2dcb77e-a8e6-41b8-83e5-1e10eb1d71f6 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': task-2194883, 'name': PowerOffVM_Task, 'duration_secs': 0.206437} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.706014] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2dcb77e-a8e6-41b8-83e5-1e10eb1d71f6 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 5edf4b5b-93b1-4efc-80ec-f3a7009d5449] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1083.706227] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c2dcb77e-a8e6-41b8-83e5-1e10eb1d71f6 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 5edf4b5b-93b1-4efc-80ec-f3a7009d5449] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1083.706456] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1083.706456] env[62627]: value = "task-2194885" [ 1083.706456] env[62627]: _type = "Task" [ 1083.706456] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.706630] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7304a5eb-a0c6-49d3-97e8-c79a531fa91c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.717014] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194885, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.848415] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Releasing lock "refresh_cache-aca062be-755d-4e41-b9ed-f28d2d3bf598" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1083.848568] env[62627]: DEBUG nova.compute.manager [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] [instance: aca062be-755d-4e41-b9ed-f28d2d3bf598] Instance network_info: |[{"id": "a3ecaa14-2705-45f1-a66a-eb481153f099", "address": "fa:16:3e:b8:97:5a", "network": {"id": "053f804e-7607-49fd-a528-125b6a08a6ae", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1914615995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4c55337417dc48978dbf0aa775d3d76b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa3ecaa14-27", "ovs_interfaceid": "a3ecaa14-2705-45f1-a66a-eb481153f099", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1083.849021] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] [instance: aca062be-755d-4e41-b9ed-f28d2d3bf598] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b8:97:5a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0cd5d325-3053-407e-a4ee-f627e82a23f9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a3ecaa14-2705-45f1-a66a-eb481153f099', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1083.856172] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Creating folder: Project (4c55337417dc48978dbf0aa775d3d76b). Parent ref: group-v447541. 
{{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1083.856443] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5a496ba8-1c10-4be1-b098-d7c4993bb4e8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.891711] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Created folder: Project (4c55337417dc48978dbf0aa775d3d76b) in parent group-v447541. [ 1083.891905] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Creating folder: Instances. Parent ref: group-v447760. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1083.892252] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a297fb2c-64c5-4ef2-b6c5-d99730d2688e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.901964] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Created folder: Instances in parent group-v447760. [ 1083.902284] env[62627]: DEBUG oslo.service.loopingcall [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1083.902480] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aca062be-755d-4e41-b9ed-f28d2d3bf598] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1083.902674] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a22f265f-91c1-494a-b8ea-60459e3f66f7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.920666] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1083.920666] env[62627]: value = "task-2194889" [ 1083.920666] env[62627]: _type = "Task" [ 1083.920666] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.928153] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194889, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.935186] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c2dcb77e-a8e6-41b8-83e5-1e10eb1d71f6 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 5edf4b5b-93b1-4efc-80ec-f3a7009d5449] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1083.935328] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c2dcb77e-a8e6-41b8-83e5-1e10eb1d71f6 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 5edf4b5b-93b1-4efc-80ec-f3a7009d5449] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1083.935518] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2dcb77e-a8e6-41b8-83e5-1e10eb1d71f6 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Deleting the datastore file [datastore2] 5edf4b5b-93b1-4efc-80ec-f3a7009d5449 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1083.935729] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0a899a16-97f4-4b0d-8bfa-f5d78e7cc35c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.942125] env[62627]: DEBUG oslo_vmware.api [None req-c2dcb77e-a8e6-41b8-83e5-1e10eb1d71f6 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Waiting for the task: (returnval){ [ 1083.942125] env[62627]: value = "task-2194890" [ 1083.942125] env[62627]: _type = "Task" [ 1083.942125] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.949337] env[62627]: DEBUG oslo_vmware.api [None req-c2dcb77e-a8e6-41b8-83e5-1e10eb1d71f6 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': task-2194890, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.113491] env[62627]: DEBUG nova.compute.manager [req-c75d979b-8f6e-4b1e-91a5-82c52e6b5dc1 req-f0e7a461-8717-4f3f-80ba-b970660755af service nova] [instance: aca062be-755d-4e41-b9ed-f28d2d3bf598] Received event network-changed-a3ecaa14-2705-45f1-a66a-eb481153f099 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1084.113836] env[62627]: DEBUG nova.compute.manager [req-c75d979b-8f6e-4b1e-91a5-82c52e6b5dc1 req-f0e7a461-8717-4f3f-80ba-b970660755af service nova] [instance: aca062be-755d-4e41-b9ed-f28d2d3bf598] Refreshing instance network info cache due to event network-changed-a3ecaa14-2705-45f1-a66a-eb481153f099. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1084.113892] env[62627]: DEBUG oslo_concurrency.lockutils [req-c75d979b-8f6e-4b1e-91a5-82c52e6b5dc1 req-f0e7a461-8717-4f3f-80ba-b970660755af service nova] Acquiring lock "refresh_cache-aca062be-755d-4e41-b9ed-f28d2d3bf598" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1084.114227] env[62627]: DEBUG oslo_concurrency.lockutils [req-c75d979b-8f6e-4b1e-91a5-82c52e6b5dc1 req-f0e7a461-8717-4f3f-80ba-b970660755af service nova] Acquired lock "refresh_cache-aca062be-755d-4e41-b9ed-f28d2d3bf598" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1084.114439] env[62627]: DEBUG nova.network.neutron [req-c75d979b-8f6e-4b1e-91a5-82c52e6b5dc1 req-f0e7a461-8717-4f3f-80ba-b970660755af service nova] [instance: aca062be-755d-4e41-b9ed-f28d2d3bf598] Refreshing network info cache for port a3ecaa14-2705-45f1-a66a-eb481153f099 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1084.126884] env[62627]: DEBUG oslo_vmware.api [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2194884, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.161059] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8fa51ddd-9cc5-4d9e-b522-0065a6986d81 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.460s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1084.219142] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194885, 'name': CreateVM_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.431553] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194889, 'name': CreateVM_Task, 'duration_secs': 0.345916} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.431712] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aca062be-755d-4e41-b9ed-f28d2d3bf598] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1084.432420] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1084.432585] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1084.432909] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1084.433178] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8b80f000-65b7-4ab7-acb0-a8f400fbdb2a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.438124] env[62627]: DEBUG oslo_vmware.api [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Waiting for the task: (returnval){ [ 1084.438124] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52bb7c9e-c69d-28d2-6d61-050d6ceb561a" [ 1084.438124] env[62627]: _type = "Task" [ 1084.438124] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.448369] env[62627]: DEBUG oslo_vmware.api [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52bb7c9e-c69d-28d2-6d61-050d6ceb561a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.452821] env[62627]: DEBUG oslo_vmware.api [None req-c2dcb77e-a8e6-41b8-83e5-1e10eb1d71f6 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': task-2194890, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.280835} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.453055] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2dcb77e-a8e6-41b8-83e5-1e10eb1d71f6 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1084.453242] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c2dcb77e-a8e6-41b8-83e5-1e10eb1d71f6 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 5edf4b5b-93b1-4efc-80ec-f3a7009d5449] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1084.453415] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c2dcb77e-a8e6-41b8-83e5-1e10eb1d71f6 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 5edf4b5b-93b1-4efc-80ec-f3a7009d5449] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1084.453582] env[62627]: INFO nova.compute.manager [None req-c2dcb77e-a8e6-41b8-83e5-1e10eb1d71f6 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 5edf4b5b-93b1-4efc-80ec-f3a7009d5449] Took 1.28 seconds to destroy the instance on the hypervisor. [ 1084.453810] env[62627]: DEBUG oslo.service.loopingcall [None req-c2dcb77e-a8e6-41b8-83e5-1e10eb1d71f6 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1084.453990] env[62627]: DEBUG nova.compute.manager [-] [instance: 5edf4b5b-93b1-4efc-80ec-f3a7009d5449] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1084.454100] env[62627]: DEBUG nova.network.neutron [-] [instance: 5edf4b5b-93b1-4efc-80ec-f3a7009d5449] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1084.627136] env[62627]: DEBUG oslo_vmware.api [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2194884, 'name': ReconfigVM_Task, 'duration_secs': 0.679697} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.627420] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Reconfigured VM instance instance-00000051 to attach disk [datastore2] 78307469-ebbe-45bc-a146-eaa298e9b707/78307469-ebbe-45bc-a146-eaa298e9b707.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1084.627988] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-efb349da-eaf0-4aac-9e9b-6e93f734bbfb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.634665] env[62627]: DEBUG oslo_vmware.api [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1084.634665] env[62627]: value = "task-2194891" [ 1084.634665] env[62627]: _type = "Task" [ 1084.634665] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.643482] env[62627]: DEBUG oslo_vmware.api [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2194891, 'name': Rename_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.720523] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194885, 'name': CreateVM_Task, 'duration_secs': 0.607127} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.720778] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1084.721548] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1084.948857] env[62627]: DEBUG oslo_vmware.api [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52bb7c9e-c69d-28d2-6d61-050d6ceb561a, 'name': SearchDatastore_Task, 'duration_secs': 0.030664} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.949157] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1084.949396] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] [instance: aca062be-755d-4e41-b9ed-f28d2d3bf598] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1084.949627] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1084.949772] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1084.949943] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1084.950246] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1084.950555] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1084.950779] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-93956298-8366-4fad-ae15-815162c2c647 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.952504] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-233530f5-f916-475f-9a47-ae0688ce5c93 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.954791] env[62627]: DEBUG nova.network.neutron 
[req-c75d979b-8f6e-4b1e-91a5-82c52e6b5dc1 req-f0e7a461-8717-4f3f-80ba-b970660755af service nova] [instance: aca062be-755d-4e41-b9ed-f28d2d3bf598] Updated VIF entry in instance network info cache for port a3ecaa14-2705-45f1-a66a-eb481153f099. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1084.955123] env[62627]: DEBUG nova.network.neutron [req-c75d979b-8f6e-4b1e-91a5-82c52e6b5dc1 req-f0e7a461-8717-4f3f-80ba-b970660755af service nova] [instance: aca062be-755d-4e41-b9ed-f28d2d3bf598] Updating instance_info_cache with network_info: [{"id": "a3ecaa14-2705-45f1-a66a-eb481153f099", "address": "fa:16:3e:b8:97:5a", "network": {"id": "053f804e-7607-49fd-a528-125b6a08a6ae", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1914615995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4c55337417dc48978dbf0aa775d3d76b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa3ecaa14-27", "ovs_interfaceid": "a3ecaa14-2705-45f1-a66a-eb481153f099", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1084.961888] env[62627]: DEBUG oslo_vmware.api [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1084.961888] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]523dfa72-8196-b44c-fbb3-03e1e8e9db12" [ 1084.961888] env[62627]: _type = "Task" [ 1084.961888] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.963249] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1084.963381] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1084.967876] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f21c3623-ec7b-4777-8006-1f8face11686 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.976779] env[62627]: DEBUG oslo_vmware.api [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]523dfa72-8196-b44c-fbb3-03e1e8e9db12, 'name': SearchDatastore_Task, 'duration_secs': 0.008506} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.978099] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1084.978354] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1084.978537] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1084.978811] env[62627]: DEBUG oslo_vmware.api [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Waiting for the task: (returnval){ [ 1084.978811] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52ecdbc0-ce44-ab9c-b72f-952c1662b28f" [ 1084.978811] env[62627]: _type = "Task" [ 1084.978811] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.988676] env[62627]: DEBUG oslo_vmware.api [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52ecdbc0-ce44-ab9c-b72f-952c1662b28f, 'name': SearchDatastore_Task, 'duration_secs': 0.008999} completed successfully. 
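The paired "Acquiring lock / Acquired lock / Releasing lock" entries around "[datastore2] devstack-image-cache_base/..." above come from oslo.concurrency's lockutils, which serializes work on the shared image cache. A minimal sketch of that guard; the lock name and handler below are hypothetical:

from oslo_concurrency import lockutils

def fetch_image_if_missing(cache_path):
    """Hypothetical cache-miss handler; the real work is done by Nova's vmops."""

cache_path = "[datastore2] devstack-image-cache_base/<image-uuid>"

# Entering and leaving the with-block emits the Acquiring/Acquired/Releasing
# DEBUG lines: only one worker per process touches the cached image at a time.
with lockutils.lock(cache_path):
    fetch_image_if_missing(cache_path)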
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.989919] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fbae6bd7-2d52-4c9c-a1fe-3d13d708ba07 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.995247] env[62627]: DEBUG oslo_vmware.api [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Waiting for the task: (returnval){ [ 1084.995247] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]526b9ebd-5a0e-1a58-c3d6-76336ce64fce" [ 1084.995247] env[62627]: _type = "Task" [ 1084.995247] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.002576] env[62627]: DEBUG oslo_vmware.api [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]526b9ebd-5a0e-1a58-c3d6-76336ce64fce, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.143794] env[62627]: DEBUG oslo_vmware.api [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2194891, 'name': Rename_Task, 'duration_secs': 0.339404} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.144082] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1085.144325] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4abeb688-464d-4fbf-94b9-e542cca0d5ca {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.150607] env[62627]: DEBUG oslo_vmware.api [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1085.150607] env[62627]: value = "task-2194892" [ 1085.150607] env[62627]: _type = "Task" [ 1085.150607] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.157921] env[62627]: DEBUG oslo_vmware.api [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2194892, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.302263] env[62627]: DEBUG nova.network.neutron [-] [instance: 5edf4b5b-93b1-4efc-80ec-f3a7009d5449] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1085.459056] env[62627]: DEBUG oslo_concurrency.lockutils [req-c75d979b-8f6e-4b1e-91a5-82c52e6b5dc1 req-f0e7a461-8717-4f3f-80ba-b970660755af service nova] Releasing lock "refresh_cache-aca062be-755d-4e41-b9ed-f28d2d3bf598" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1085.507924] env[62627]: DEBUG oslo_vmware.api [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]526b9ebd-5a0e-1a58-c3d6-76336ce64fce, 'name': SearchDatastore_Task, 'duration_secs': 0.012189} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.508325] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1085.508623] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] aca062be-755d-4e41-b9ed-f28d2d3bf598/aca062be-755d-4e41-b9ed-f28d2d3bf598.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1085.508936] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1085.509259] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1085.509392] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5d9cb43e-0074-4fe6-af7a-3170198c73c1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.511346] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3a6e0619-fb71-4a0d-b362-ac2a56ea8920 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.517927] env[62627]: DEBUG oslo_vmware.api [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 
tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Waiting for the task: (returnval){ [ 1085.517927] env[62627]: value = "task-2194893" [ 1085.517927] env[62627]: _type = "Task" [ 1085.517927] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.522034] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1085.522034] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1085.522678] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ae157887-1f36-470a-a1a0-4a83455bfee8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.528822] env[62627]: DEBUG oslo_vmware.api [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Task: {'id': task-2194893, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.531572] env[62627]: DEBUG oslo_vmware.api [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1085.531572] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d3bf31-cd35-d682-a031-d412c4363abf" [ 1085.531572] env[62627]: _type = "Task" [ 1085.531572] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.538732] env[62627]: DEBUG oslo_vmware.api [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d3bf31-cd35-d682-a031-d412c4363abf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.665676] env[62627]: DEBUG oslo_vmware.api [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2194892, 'name': PowerOnVM_Task, 'duration_secs': 0.502768} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.665676] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1085.665676] env[62627]: INFO nova.compute.manager [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Took 9.10 seconds to spawn the instance on the hypervisor. [ 1085.665676] env[62627]: DEBUG nova.compute.manager [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1085.665676] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99fad583-9aa1-47a7-92eb-9d42fb1c9128 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.762507] env[62627]: DEBUG oslo_concurrency.lockutils [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquiring lock "0ee04bf9-6aa6-485c-a979-191d52af236d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1085.762757] env[62627]: DEBUG oslo_concurrency.lockutils [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "0ee04bf9-6aa6-485c-a979-191d52af236d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1085.804897] env[62627]: INFO nova.compute.manager [-] [instance: 5edf4b5b-93b1-4efc-80ec-f3a7009d5449] Took 1.35 seconds to deallocate network for instance. [ 1086.027801] env[62627]: DEBUG oslo_vmware.api [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Task: {'id': task-2194893, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.460246} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.028089] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] aca062be-755d-4e41-b9ed-f28d2d3bf598/aca062be-755d-4e41-b9ed-f28d2d3bf598.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1086.028308] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] [instance: aca062be-755d-4e41-b9ed-f28d2d3bf598] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1086.028580] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c758c9fe-a8f9-4533-8ec9-51a3f262d4c9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.037152] env[62627]: DEBUG oslo_vmware.api [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Waiting for the task: (returnval){ [ 1086.037152] env[62627]: value = "task-2194894" [ 1086.037152] env[62627]: _type = "Task" [ 1086.037152] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.043481] env[62627]: DEBUG oslo_vmware.api [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d3bf31-cd35-d682-a031-d412c4363abf, 'name': SearchDatastore_Task, 'duration_secs': 0.010982} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.044615] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0e45a8e8-108d-482c-a022-6628465c308b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.049924] env[62627]: DEBUG oslo_vmware.api [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Task: {'id': task-2194894, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.052958] env[62627]: DEBUG oslo_vmware.api [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1086.052958] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]525de747-6ef3-6c8c-e4b8-b235a2ffcebc" [ 1086.052958] env[62627]: _type = "Task" [ 1086.052958] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.060217] env[62627]: DEBUG oslo_vmware.api [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]525de747-6ef3-6c8c-e4b8-b235a2ffcebc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.148544] env[62627]: DEBUG nova.compute.manager [req-e774b86a-20af-43eb-afa7-28bd5caa1d91 req-513eab8e-c782-414c-8b0d-3ae0cdd341e5 service nova] [instance: 5edf4b5b-93b1-4efc-80ec-f3a7009d5449] Received event network-vif-deleted-7a1d4f75-d881-4698-995b-8545c20494e7 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1086.180644] env[62627]: INFO nova.compute.manager [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Took 14.00 seconds to build instance. [ 1086.265101] env[62627]: DEBUG nova.compute.manager [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 0ee04bf9-6aa6-485c-a979-191d52af236d] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1086.312524] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c2dcb77e-a8e6-41b8-83e5-1e10eb1d71f6 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1086.312793] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c2dcb77e-a8e6-41b8-83e5-1e10eb1d71f6 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1086.313013] env[62627]: DEBUG nova.objects.instance [None req-c2dcb77e-a8e6-41b8-83e5-1e10eb1d71f6 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Lazy-loading 'resources' on Instance uuid 5edf4b5b-93b1-4efc-80ec-f3a7009d5449 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1086.549049] env[62627]: DEBUG oslo_vmware.api [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Task: {'id': task-2194894, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063614} completed successfully. 
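The CopyVirtualDisk_Task / ExtendVirtualDisk_Task pair above is the disk side of the spawn: copy the cached base VMDK into the instance directory, then grow the root disk to the flavor size (1048576 KB, i.e. 1 GiB, here). An illustrative sketch of those two VirtualDiskManager calls, reusing the invoke-and-wait pattern from the earlier sketch; datastore paths and the datacenter ref are placeholders:

from oslo_vmware import api

session = api.VMwareAPISession('vc.example.test', 'user', 'secret', 10, 0.5)
disk_mgr = session.vim.service_content.virtualDiskManager
dc_ref = ...  # Datacenter managed-object ref (placeholder)

src = '[datastore2] devstack-image-cache_base/<image>/<image>.vmdk'
dst = '[datastore2] <instance-uuid>/<instance-uuid>.vmdk'

# Copy the cached base image into the instance folder ...
copy_task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr,
                               sourceName=src, sourceDatacenter=dc_ref,
                               destName=dst, destDatacenter=dc_ref)
session.wait_for_task(copy_task)

# ... then extend the copied root disk to the requested capacity.
extend_task = session.invoke_api(session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
                                 name=dst, datacenter=dc_ref,
                                 newCapacityKb=1048576, eagerZero=False)
session.wait_for_task(extend_task)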
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.549311] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] [instance: aca062be-755d-4e41-b9ed-f28d2d3bf598] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1086.550076] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1d00931-5430-4b9f-b50f-8b8a7741a18b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.573269] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] [instance: aca062be-755d-4e41-b9ed-f28d2d3bf598] Reconfiguring VM instance instance-00000052 to attach disk [datastore2] aca062be-755d-4e41-b9ed-f28d2d3bf598/aca062be-755d-4e41-b9ed-f28d2d3bf598.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1086.573802] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-774ce786-2258-49eb-b435-0dff0d8265d4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.591112] env[62627]: DEBUG oslo_vmware.api [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]525de747-6ef3-6c8c-e4b8-b235a2ffcebc, 'name': SearchDatastore_Task, 'duration_secs': 0.008188} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.591734] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1086.592111] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] e71794b9-22eb-4643-8587-f12a198024a4/e71794b9-22eb-4643-8587-f12a198024a4.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1086.592303] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-70ca8fb1-bb5b-4be8-928d-f8cfed7ad539 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.595878] env[62627]: DEBUG oslo_vmware.api [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Waiting for the task: (returnval){ [ 1086.595878] env[62627]: value = "task-2194895" [ 1086.595878] env[62627]: _type = "Task" [ 1086.595878] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.599775] env[62627]: DEBUG oslo_vmware.api [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1086.599775] env[62627]: value = "task-2194896" [ 1086.599775] env[62627]: _type = "Task" [ 1086.599775] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.605821] env[62627]: DEBUG oslo_vmware.api [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Task: {'id': task-2194895, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.610216] env[62627]: DEBUG oslo_vmware.api [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2194896, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.683056] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4b0e581b-c747-4421-ba7b-653773613c84 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "78307469-ebbe-45bc-a146-eaa298e9b707" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.517s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1086.789051] env[62627]: DEBUG oslo_concurrency.lockutils [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1086.794693] env[62627]: INFO nova.compute.manager [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Rebuilding instance [ 1086.835875] env[62627]: DEBUG nova.compute.manager [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1086.836795] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10fdcaee-7245-4850-9a30-530ea6de5f12 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.044220] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d62c29b9-c74f-4a2d-b902-b65f57899888 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.052010] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dfa95b5-9b91-4ef9-9a4c-20433fbe4ee8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.082067] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa08bff0-5e06-4125-886a-43aec5a02fd1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.089279] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dccba412-4d6a-4d48-9eb3-3358ebb65609 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.102773] env[62627]: DEBUG nova.compute.provider_tree [None req-c2dcb77e-a8e6-41b8-83e5-1e10eb1d71f6 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1087.115905] env[62627]: DEBUG oslo_vmware.api [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 
tempest-ImagesNegativeTestJSON-890659973-project-member] Task: {'id': task-2194895, 'name': ReconfigVM_Task, 'duration_secs': 0.29087} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.118996] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] [instance: aca062be-755d-4e41-b9ed-f28d2d3bf598] Reconfigured VM instance instance-00000052 to attach disk [datastore2] aca062be-755d-4e41-b9ed-f28d2d3bf598/aca062be-755d-4e41-b9ed-f28d2d3bf598.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1087.119978] env[62627]: DEBUG oslo_vmware.api [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2194896, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.120219] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3bb684a7-cb80-4de5-9fbb-0ed850650b76 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.127953] env[62627]: DEBUG oslo_vmware.api [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Waiting for the task: (returnval){ [ 1087.127953] env[62627]: value = "task-2194897" [ 1087.127953] env[62627]: _type = "Task" [ 1087.127953] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.137469] env[62627]: DEBUG oslo_vmware.api [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Task: {'id': task-2194897, 'name': Rename_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.613183] env[62627]: DEBUG nova.scheduler.client.report [None req-c2dcb77e-a8e6-41b8-83e5-1e10eb1d71f6 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1087.617065] env[62627]: DEBUG oslo_vmware.api [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2194896, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.933292} completed successfully. 
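For context on the provider inventory logged above: placement treats the usable capacity of each resource class as (total - reserved) * allocation_ratio, with max_unit capping any single allocation. Reading the figures from that entry (a back-of-the-envelope check, not Nova output):

# capacity = (total - reserved) * allocation_ratio, per resource class
vcpu    = (48     - 0)   * 4.0   # 192 schedulable vCPUs (max 16 per instance)
ram_mb  = (196590 - 512) * 1.0   # 196078 MB of RAM (max 65530 MB per instance)
disk_gb = (400    - 0)   * 1.0   # 400 GB of disk (max 176 GB per instance)
print(vcpu, ram_mb, disk_gb)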
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.617688] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] e71794b9-22eb-4643-8587-f12a198024a4/e71794b9-22eb-4643-8587-f12a198024a4.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1087.618027] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1087.618376] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-68641811-1156-4c43-8c66-9d4b1d658030 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.626873] env[62627]: DEBUG oslo_vmware.api [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1087.626873] env[62627]: value = "task-2194898" [ 1087.626873] env[62627]: _type = "Task" [ 1087.626873] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.641919] env[62627]: DEBUG oslo_vmware.api [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2194898, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.645484] env[62627]: DEBUG oslo_vmware.api [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Task: {'id': task-2194897, 'name': Rename_Task, 'duration_secs': 0.234505} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.645813] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] [instance: aca062be-755d-4e41-b9ed-f28d2d3bf598] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1087.646404] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b4890de4-33e1-4b21-bfbd-27a8a1f7f1d6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.653399] env[62627]: DEBUG oslo_vmware.api [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Waiting for the task: (returnval){ [ 1087.653399] env[62627]: value = "task-2194899" [ 1087.653399] env[62627]: _type = "Task" [ 1087.653399] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.662229] env[62627]: DEBUG oslo_vmware.api [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Task: {'id': task-2194899, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.679551] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1087.679864] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1087.853796] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1087.854195] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f6508b7d-4c30-4bbb-b221-5fd285afe9a2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.861461] env[62627]: DEBUG oslo_vmware.api [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1087.861461] env[62627]: value = "task-2194900" [ 1087.861461] env[62627]: _type = "Task" [ 1087.861461] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.869520] env[62627]: DEBUG oslo_vmware.api [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2194900, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.118921] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c2dcb77e-a8e6-41b8-83e5-1e10eb1d71f6 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.806s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1088.121843] env[62627]: DEBUG oslo_concurrency.lockutils [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.332s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1088.122916] env[62627]: INFO nova.compute.claims [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 0ee04bf9-6aa6-485c-a979-191d52af236d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1088.137543] env[62627]: DEBUG oslo_vmware.api [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2194898, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.170628} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.137830] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1088.138691] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea9fe377-31dc-48ce-9179-1778274e5988 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.163451] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Reconfiguring VM instance instance-00000050 to attach disk [datastore2] e71794b9-22eb-4643-8587-f12a198024a4/e71794b9-22eb-4643-8587-f12a198024a4.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1088.164389] env[62627]: INFO nova.scheduler.client.report [None req-c2dcb77e-a8e6-41b8-83e5-1e10eb1d71f6 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Deleted allocations for instance 5edf4b5b-93b1-4efc-80ec-f3a7009d5449 [ 1088.172508] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-71c87b63-19bc-432c-a9d9-d08304dd9321 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.194830] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task 
ComputeManager._heal_instance_info_cache {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1088.195169] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Starting heal instance info cache {{(pid=62627) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 1088.201089] env[62627]: DEBUG oslo_vmware.api [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Task: {'id': task-2194899, 'name': PowerOnVM_Task, 'duration_secs': 0.488307} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.202806] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] [instance: aca062be-755d-4e41-b9ed-f28d2d3bf598] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1088.203377] env[62627]: INFO nova.compute.manager [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] [instance: aca062be-755d-4e41-b9ed-f28d2d3bf598] Took 6.70 seconds to spawn the instance on the hypervisor. [ 1088.203588] env[62627]: DEBUG nova.compute.manager [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] [instance: aca062be-755d-4e41-b9ed-f28d2d3bf598] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1088.204211] env[62627]: DEBUG oslo_vmware.api [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1088.204211] env[62627]: value = "task-2194901" [ 1088.204211] env[62627]: _type = "Task" [ 1088.204211] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.204990] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bc95519-6a7f-48c7-b64f-7047e2612d92 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.222572] env[62627]: DEBUG oslo_vmware.api [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2194901, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.371557] env[62627]: DEBUG oslo_vmware.api [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2194900, 'name': PowerOffVM_Task, 'duration_secs': 0.384267} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.371789] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1088.372037] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1088.372792] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea133a05-94fb-40d4-b5b5-b0da71bd4700 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.379383] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1088.379651] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ac5852f3-8dfb-4833-b7fe-f63d82a47ed1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.436411] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1088.436647] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1088.436836] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Deleting the datastore file [datastore2] 78307469-ebbe-45bc-a146-eaa298e9b707 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1088.437123] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9874c9c9-d9a8-4db1-9524-7762165b96af {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.443966] env[62627]: DEBUG oslo_vmware.api [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1088.443966] env[62627]: value = "task-2194903" [ 1088.443966] env[62627]: _type = "Task" [ 1088.443966] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.452287] env[62627]: DEBUG oslo_vmware.api [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2194903, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.692786] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c2dcb77e-a8e6-41b8-83e5-1e10eb1d71f6 tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Lock "5edf4b5b-93b1-4efc-80ec-f3a7009d5449" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.026s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1088.718550] env[62627]: DEBUG oslo_vmware.api [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2194901, 'name': ReconfigVM_Task, 'duration_secs': 0.3396} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.722469] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Reconfigured VM instance instance-00000050 to attach disk [datastore2] e71794b9-22eb-4643-8587-f12a198024a4/e71794b9-22eb-4643-8587-f12a198024a4.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1088.724716] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b93b0a37-c5a4-4547-8018-29569196e963 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.726808] env[62627]: INFO nova.compute.manager [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] [instance: aca062be-755d-4e41-b9ed-f28d2d3bf598] Took 12.18 seconds to build instance. [ 1088.733446] env[62627]: DEBUG oslo_vmware.api [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1088.733446] env[62627]: value = "task-2194904" [ 1088.733446] env[62627]: _type = "Task" [ 1088.733446] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.742400] env[62627]: DEBUG oslo_vmware.api [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2194904, 'name': Rename_Task} progress is 5%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.957473] env[62627]: DEBUG oslo_vmware.api [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2194903, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.193373} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.957889] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1088.957953] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1088.958155] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1089.229194] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ead37ed3-5e14-4101-b96c-8012ccf4ef68 tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Lock "aca062be-755d-4e41-b9ed-f28d2d3bf598" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.693s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1089.244196] env[62627]: DEBUG oslo_vmware.api [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2194904, 'name': Rename_Task} progress is 99%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.299516] env[62627]: DEBUG oslo_concurrency.lockutils [None req-68ee26e6-8bc4-42b7-99d1-d20d84e12aca tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Acquiring lock "61f54696-1ce2-4515-ab9c-72347dbd9de0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1089.299958] env[62627]: DEBUG oslo_concurrency.lockutils [None req-68ee26e6-8bc4-42b7-99d1-d20d84e12aca tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Lock "61f54696-1ce2-4515-ab9c-72347dbd9de0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1089.300318] env[62627]: DEBUG oslo_concurrency.lockutils [None req-68ee26e6-8bc4-42b7-99d1-d20d84e12aca tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Acquiring lock "61f54696-1ce2-4515-ab9c-72347dbd9de0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1089.300637] env[62627]: DEBUG oslo_concurrency.lockutils [None req-68ee26e6-8bc4-42b7-99d1-d20d84e12aca tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Lock "61f54696-1ce2-4515-ab9c-72347dbd9de0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1089.301491] env[62627]: DEBUG oslo_concurrency.lockutils [None req-68ee26e6-8bc4-42b7-99d1-d20d84e12aca tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Lock "61f54696-1ce2-4515-ab9c-72347dbd9de0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1089.303888] env[62627]: INFO nova.compute.manager [None req-68ee26e6-8bc4-42b7-99d1-d20d84e12aca tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 61f54696-1ce2-4515-ab9c-72347dbd9de0] Terminating instance [ 1089.352305] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cceb712-79a4-4d7d-b565-9caaf5ef6932 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.359743] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0d9f411-8abe-4b35-9d3f-d3fafc6548b6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.388791] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfc47eb7-b5e6-4e4b-8eae-415b46e3fbbc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1089.395468] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-354faa1b-2dab-4332-82b9-64d7e5887b75 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.409408] env[62627]: DEBUG nova.compute.provider_tree [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1089.744282] env[62627]: DEBUG oslo_vmware.api [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2194904, 'name': Rename_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.775417] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0728787c-a739-4271-97c1-9268e19b25bd tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Acquiring lock "aca062be-755d-4e41-b9ed-f28d2d3bf598" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1089.775654] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0728787c-a739-4271-97c1-9268e19b25bd tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Lock "aca062be-755d-4e41-b9ed-f28d2d3bf598" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1089.775851] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0728787c-a739-4271-97c1-9268e19b25bd tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Acquiring lock "aca062be-755d-4e41-b9ed-f28d2d3bf598-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1089.776044] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0728787c-a739-4271-97c1-9268e19b25bd tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Lock "aca062be-755d-4e41-b9ed-f28d2d3bf598-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1089.776266] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0728787c-a739-4271-97c1-9268e19b25bd tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Lock "aca062be-755d-4e41-b9ed-f28d2d3bf598-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1089.778307] env[62627]: INFO nova.compute.manager [None req-0728787c-a739-4271-97c1-9268e19b25bd tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] [instance: 
aca062be-755d-4e41-b9ed-f28d2d3bf598] Terminating instance [ 1089.809831] env[62627]: DEBUG nova.compute.manager [None req-68ee26e6-8bc4-42b7-99d1-d20d84e12aca tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 61f54696-1ce2-4515-ab9c-72347dbd9de0] Start destroying the instance on the hypervisor. {{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1089.810040] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-68ee26e6-8bc4-42b7-99d1-d20d84e12aca tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 61f54696-1ce2-4515-ab9c-72347dbd9de0] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1089.810939] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a29bb41e-80e4-46d8-abdf-8bbb5ca8771c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.818782] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-68ee26e6-8bc4-42b7-99d1-d20d84e12aca tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 61f54696-1ce2-4515-ab9c-72347dbd9de0] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1089.819041] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3b7228d0-07ec-4720-a07d-7c25bffa11a5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.824935] env[62627]: DEBUG oslo_vmware.api [None req-68ee26e6-8bc4-42b7-99d1-d20d84e12aca tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Waiting for the task: (returnval){ [ 1089.824935] env[62627]: value = "task-2194905" [ 1089.824935] env[62627]: _type = "Task" [ 1089.824935] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.833083] env[62627]: DEBUG oslo_vmware.api [None req-68ee26e6-8bc4-42b7-99d1-d20d84e12aca tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': task-2194905, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.913289] env[62627]: DEBUG nova.scheduler.client.report [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1089.994315] env[62627]: DEBUG nova.virt.hardware [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1089.994633] env[62627]: DEBUG nova.virt.hardware [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1089.994747] env[62627]: DEBUG nova.virt.hardware [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1089.994967] env[62627]: DEBUG nova.virt.hardware [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1089.995228] env[62627]: DEBUG nova.virt.hardware [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1089.995483] env[62627]: DEBUG nova.virt.hardware [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:434}} [ 1089.995680] env[62627]: DEBUG nova.virt.hardware [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1089.995877] env[62627]: DEBUG nova.virt.hardware [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1089.996111] env[62627]: DEBUG nova.virt.hardware [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1089.996362] env[62627]: DEBUG nova.virt.hardware [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1089.996603] env[62627]: DEBUG nova.virt.hardware [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1089.997864] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bb18b8e-293b-41b3-b791-fd572d467e11 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.006561] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-242fb18e-573b-45b3-b120-b2cceeb291f4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.019576] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f3:f0:a9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '098df9b7-d759-47f7-b756-334848cb423b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '75c54554-1d03-46ab-bb41-d84aca2543fe', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1090.026808] env[62627]: DEBUG oslo.service.loopingcall [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1090.027055] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1090.027385] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c473dd38-dc45-4714-bdfc-cb458b3d6747 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.046107] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1090.046107] env[62627]: value = "task-2194906" [ 1090.046107] env[62627]: _type = "Task" [ 1090.046107] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.053423] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194906, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.243926] env[62627]: DEBUG oslo_vmware.api [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2194904, 'name': Rename_Task, 'duration_secs': 1.145812} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.244234] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1090.244477] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7cbceba3-027b-4026-95e8-ae75934fa4a5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.251119] env[62627]: DEBUG oslo_vmware.api [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1090.251119] env[62627]: value = "task-2194907" [ 1090.251119] env[62627]: _type = "Task" [ 1090.251119] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.258756] env[62627]: DEBUG oslo_vmware.api [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2194907, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.281770] env[62627]: DEBUG nova.compute.manager [None req-0728787c-a739-4271-97c1-9268e19b25bd tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] [instance: aca062be-755d-4e41-b9ed-f28d2d3bf598] Start destroying the instance on the hypervisor. 
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1090.282153] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-0728787c-a739-4271-97c1-9268e19b25bd tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] [instance: aca062be-755d-4e41-b9ed-f28d2d3bf598] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1090.283429] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fce642fb-9584-4156-bd29-a1fd1952748e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.290523] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-0728787c-a739-4271-97c1-9268e19b25bd tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] [instance: aca062be-755d-4e41-b9ed-f28d2d3bf598] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1090.290751] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-85cad774-0d22-4a03-8339-b53156f57a47 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.295987] env[62627]: DEBUG oslo_vmware.api [None req-0728787c-a739-4271-97c1-9268e19b25bd tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Waiting for the task: (returnval){ [ 1090.295987] env[62627]: value = "task-2194908" [ 1090.295987] env[62627]: _type = "Task" [ 1090.295987] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.304713] env[62627]: DEBUG oslo_vmware.api [None req-0728787c-a739-4271-97c1-9268e19b25bd tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Task: {'id': task-2194908, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.334734] env[62627]: DEBUG oslo_vmware.api [None req-68ee26e6-8bc4-42b7-99d1-d20d84e12aca tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': task-2194905, 'name': PowerOffVM_Task, 'duration_secs': 0.261102} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.335065] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-68ee26e6-8bc4-42b7-99d1-d20d84e12aca tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 61f54696-1ce2-4515-ab9c-72347dbd9de0] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1090.335255] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-68ee26e6-8bc4-42b7-99d1-d20d84e12aca tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 61f54696-1ce2-4515-ab9c-72347dbd9de0] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1090.335539] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-36fca9dd-6f6a-4997-bed7-95accb193646 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.395736] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-68ee26e6-8bc4-42b7-99d1-d20d84e12aca tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 61f54696-1ce2-4515-ab9c-72347dbd9de0] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1090.395854] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-68ee26e6-8bc4-42b7-99d1-d20d84e12aca tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 61f54696-1ce2-4515-ab9c-72347dbd9de0] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1090.396026] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-68ee26e6-8bc4-42b7-99d1-d20d84e12aca tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Deleting the datastore file [datastore2] 61f54696-1ce2-4515-ab9c-72347dbd9de0 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1090.396391] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6730b933-f724-4a23-b8ef-06c7465aadf8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.402626] env[62627]: DEBUG oslo_vmware.api [None req-68ee26e6-8bc4-42b7-99d1-d20d84e12aca tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Waiting for the task: (returnval){ [ 1090.402626] env[62627]: value = "task-2194910" [ 1090.402626] env[62627]: _type = "Task" [ 1090.402626] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.410571] env[62627]: DEBUG oslo_vmware.api [None req-68ee26e6-8bc4-42b7-99d1-d20d84e12aca tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': task-2194910, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.420602] env[62627]: DEBUG oslo_concurrency.lockutils [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.299s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1090.421150] env[62627]: DEBUG nova.compute.manager [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 0ee04bf9-6aa6-485c-a979-191d52af236d] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1090.427174] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6c29496a-43b0-4a80-bc7e-387e9bc3e96c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Acquiring lock "457e5aa6-d229-4f48-8817-e918ec4bd0f9" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1090.427431] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6c29496a-43b0-4a80-bc7e-387e9bc3e96c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Lock "457e5aa6-d229-4f48-8817-e918ec4bd0f9" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1090.556345] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194906, 'name': CreateVM_Task, 'duration_secs': 0.39013} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.556544] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1090.557271] env[62627]: DEBUG oslo_concurrency.lockutils [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1090.557459] env[62627]: DEBUG oslo_concurrency.lockutils [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1090.557794] env[62627]: DEBUG oslo_concurrency.lockutils [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1090.558055] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dfe6c988-5be2-4707-9e8b-6594cf33b8a0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.565357] env[62627]: DEBUG oslo_vmware.api [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1090.565357] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]520afb41-2be8-5df3-16f6-f7e946218390" [ 1090.565357] env[62627]: _type = "Task" [ 1090.565357] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.573025] env[62627]: DEBUG oslo_vmware.api [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]520afb41-2be8-5df3-16f6-f7e946218390, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.761960] env[62627]: DEBUG oslo_vmware.api [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2194907, 'name': PowerOnVM_Task, 'duration_secs': 0.461683} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.762246] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1090.762449] env[62627]: DEBUG nova.compute.manager [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1090.763239] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abdaba1d-bcb2-4aed-a868-654520eeeeb5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.805447] env[62627]: DEBUG oslo_vmware.api [None req-0728787c-a739-4271-97c1-9268e19b25bd tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Task: {'id': task-2194908, 'name': PowerOffVM_Task, 'duration_secs': 0.192306} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.805714] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-0728787c-a739-4271-97c1-9268e19b25bd tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] [instance: aca062be-755d-4e41-b9ed-f28d2d3bf598] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1090.805881] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-0728787c-a739-4271-97c1-9268e19b25bd tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] [instance: aca062be-755d-4e41-b9ed-f28d2d3bf598] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1090.806167] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bf180b98-2fdf-4771-8340-f83266c7c425 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.899339] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-0728787c-a739-4271-97c1-9268e19b25bd tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] [instance: aca062be-755d-4e41-b9ed-f28d2d3bf598] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1090.899554] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-0728787c-a739-4271-97c1-9268e19b25bd tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] [instance: aca062be-755d-4e41-b9ed-f28d2d3bf598] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1090.899737] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-0728787c-a739-4271-97c1-9268e19b25bd tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Deleting the datastore file [datastore2] aca062be-755d-4e41-b9ed-f28d2d3bf598 {{(pid=62627) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1090.899994] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-888c057b-59a9-412f-a267-080aac6f21cd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.907427] env[62627]: DEBUG oslo_vmware.api [None req-0728787c-a739-4271-97c1-9268e19b25bd tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Waiting for the task: (returnval){ [ 1090.907427] env[62627]: value = "task-2194912" [ 1090.907427] env[62627]: _type = "Task" [ 1090.907427] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.913578] env[62627]: DEBUG oslo_vmware.api [None req-68ee26e6-8bc4-42b7-99d1-d20d84e12aca tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': task-2194910, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.136721} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.914233] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-68ee26e6-8bc4-42b7-99d1-d20d84e12aca tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1090.914443] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-68ee26e6-8bc4-42b7-99d1-d20d84e12aca tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 61f54696-1ce2-4515-ab9c-72347dbd9de0] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1090.914627] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-68ee26e6-8bc4-42b7-99d1-d20d84e12aca tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 61f54696-1ce2-4515-ab9c-72347dbd9de0] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1090.914798] env[62627]: INFO nova.compute.manager [None req-68ee26e6-8bc4-42b7-99d1-d20d84e12aca tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: 61f54696-1ce2-4515-ab9c-72347dbd9de0] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1090.915054] env[62627]: DEBUG oslo.service.loopingcall [None req-68ee26e6-8bc4-42b7-99d1-d20d84e12aca tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1090.918117] env[62627]: DEBUG nova.compute.manager [-] [instance: 61f54696-1ce2-4515-ab9c-72347dbd9de0] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1090.918230] env[62627]: DEBUG nova.network.neutron [-] [instance: 61f54696-1ce2-4515-ab9c-72347dbd9de0] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1090.919889] env[62627]: DEBUG oslo_vmware.api [None req-0728787c-a739-4271-97c1-9268e19b25bd tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Task: {'id': task-2194912, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.926676] env[62627]: DEBUG nova.compute.utils [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1090.928096] env[62627]: DEBUG nova.compute.manager [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 0ee04bf9-6aa6-485c-a979-191d52af236d] Allocating IP information in the background. {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1090.928267] env[62627]: DEBUG nova.network.neutron [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 0ee04bf9-6aa6-485c-a979-191d52af236d] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1090.930805] env[62627]: DEBUG nova.compute.utils [None req-6c29496a-43b0-4a80-bc7e-387e9bc3e96c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1090.981112] env[62627]: DEBUG nova.policy [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1253a87d708448aba8d97fcabe137853', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6c9263b20f714274a55b88a8bdbd5251', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 1091.081747] env[62627]: DEBUG oslo_vmware.api [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]520afb41-2be8-5df3-16f6-f7e946218390, 'name': SearchDatastore_Task, 'duration_secs': 0.010423} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.081747] env[62627]: DEBUG oslo_concurrency.lockutils [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1091.081747] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1091.081747] env[62627]: DEBUG oslo_concurrency.lockutils [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1091.081747] env[62627]: DEBUG oslo_concurrency.lockutils [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1091.081747] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1091.081747] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bca2c7f2-db5c-4474-aea5-9ca053615841 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.088047] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1091.088461] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1091.089411] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d8ddb112-d033-4e76-92be-2863da068eac {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.096033] env[62627]: DEBUG oslo_vmware.api [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1091.096033] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52adabc1-f52e-c3c8-b7b8-ac20aaff4794" [ 1091.096033] env[62627]: _type = "Task" [ 1091.096033] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.103139] env[62627]: DEBUG oslo_vmware.api [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52adabc1-f52e-c3c8-b7b8-ac20aaff4794, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.157685] env[62627]: DEBUG nova.compute.manager [req-26f67d87-238c-4c67-b80d-bd47ce59c7ff req-631da723-dc0e-41eb-bc78-5bbbcdd867e4 service nova] [instance: 61f54696-1ce2-4515-ab9c-72347dbd9de0] Received event network-vif-deleted-1a7a45f5-84e1-4b69-add9-e5b1fe8fd92f {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1091.158128] env[62627]: INFO nova.compute.manager [req-26f67d87-238c-4c67-b80d-bd47ce59c7ff req-631da723-dc0e-41eb-bc78-5bbbcdd867e4 service nova] [instance: 61f54696-1ce2-4515-ab9c-72347dbd9de0] Neutron deleted interface 1a7a45f5-84e1-4b69-add9-e5b1fe8fd92f; detaching it from the instance and deleting it from the info cache [ 1091.158128] env[62627]: DEBUG nova.network.neutron [req-26f67d87-238c-4c67-b80d-bd47ce59c7ff req-631da723-dc0e-41eb-bc78-5bbbcdd867e4 service nova] [instance: 61f54696-1ce2-4515-ab9c-72347dbd9de0] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1091.263586] env[62627]: DEBUG nova.network.neutron [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 0ee04bf9-6aa6-485c-a979-191d52af236d] Successfully created port: 83a5bd04-2ff4-4441-aa87-768435659d4d {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1091.272988] env[62627]: INFO nova.compute.manager [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: e71794b9-22eb-4643-8587-f12a198024a4] bringing vm to original state: 'stopped' [ 1091.422491] env[62627]: DEBUG oslo_vmware.api [None req-0728787c-a739-4271-97c1-9268e19b25bd tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Task: {'id': task-2194912, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.128572} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.422870] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-0728787c-a739-4271-97c1-9268e19b25bd tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1091.423163] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-0728787c-a739-4271-97c1-9268e19b25bd tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] [instance: aca062be-755d-4e41-b9ed-f28d2d3bf598] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1091.423527] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-0728787c-a739-4271-97c1-9268e19b25bd tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] [instance: aca062be-755d-4e41-b9ed-f28d2d3bf598] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1091.423831] env[62627]: INFO nova.compute.manager [None req-0728787c-a739-4271-97c1-9268e19b25bd tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] [instance: aca062be-755d-4e41-b9ed-f28d2d3bf598] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1091.424247] env[62627]: DEBUG oslo.service.loopingcall [None req-0728787c-a739-4271-97c1-9268e19b25bd tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1091.424540] env[62627]: DEBUG nova.compute.manager [-] [instance: aca062be-755d-4e41-b9ed-f28d2d3bf598] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1091.424693] env[62627]: DEBUG nova.network.neutron [-] [instance: aca062be-755d-4e41-b9ed-f28d2d3bf598] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1091.431167] env[62627]: DEBUG nova.compute.manager [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 0ee04bf9-6aa6-485c-a979-191d52af236d] Start building block device mappings for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1091.435408] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6c29496a-43b0-4a80-bc7e-387e9bc3e96c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Lock "457e5aa6-d229-4f48-8817-e918ec4bd0f9" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1091.605932] env[62627]: DEBUG oslo_vmware.api [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52adabc1-f52e-c3c8-b7b8-ac20aaff4794, 'name': SearchDatastore_Task, 'duration_secs': 0.008559} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.606755] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-28da0f5b-c6d7-48ec-98b4-fb3d70595e64 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.611659] env[62627]: DEBUG oslo_vmware.api [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1091.611659] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]525d27a0-8d53-e3fb-0d66-c1825a3ec614" [ 1091.611659] env[62627]: _type = "Task" [ 1091.611659] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.619255] env[62627]: DEBUG oslo_vmware.api [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]525d27a0-8d53-e3fb-0d66-c1825a3ec614, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.641769] env[62627]: DEBUG nova.network.neutron [-] [instance: 61f54696-1ce2-4515-ab9c-72347dbd9de0] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1091.662622] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-279bb947-4589-4a14-87d2-81805ce6b92b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.672175] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67773a80-95fa-442c-93a4-74dc6a82cafd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.705072] env[62627]: DEBUG nova.compute.manager [req-26f67d87-238c-4c67-b80d-bd47ce59c7ff req-631da723-dc0e-41eb-bc78-5bbbcdd867e4 service nova] [instance: 61f54696-1ce2-4515-ab9c-72347dbd9de0] Detach interface failed, port_id=1a7a45f5-84e1-4b69-add9-e5b1fe8fd92f, reason: Instance 61f54696-1ce2-4515-ab9c-72347dbd9de0 could not be found. 
{{(pid=62627) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1092.122378] env[62627]: DEBUG oslo_vmware.api [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]525d27a0-8d53-e3fb-0d66-c1825a3ec614, 'name': SearchDatastore_Task, 'duration_secs': 0.026007} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.122700] env[62627]: DEBUG oslo_concurrency.lockutils [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1092.123011] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore1] 78307469-ebbe-45bc-a146-eaa298e9b707/78307469-ebbe-45bc-a146-eaa298e9b707.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1092.123316] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a1ed7048-881a-4c0c-b8ca-db9882770b76 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.130559] env[62627]: DEBUG oslo_vmware.api [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1092.130559] env[62627]: value = "task-2194913" [ 1092.130559] env[62627]: _type = "Task" [ 1092.130559] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.139278] env[62627]: DEBUG oslo_vmware.api [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2194913, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.143780] env[62627]: INFO nova.compute.manager [-] [instance: 61f54696-1ce2-4515-ab9c-72347dbd9de0] Took 1.23 seconds to deallocate network for instance. 
[ 1092.239638] env[62627]: DEBUG nova.network.neutron [-] [instance: aca062be-755d-4e41-b9ed-f28d2d3bf598] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1092.280552] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquiring lock "e71794b9-22eb-4643-8587-f12a198024a4" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1092.281100] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "e71794b9-22eb-4643-8587-f12a198024a4" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1092.281215] env[62627]: DEBUG nova.compute.manager [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1092.282017] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57cf41e0-c421-4286-96f7-afcd94b13948 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.289530] env[62627]: DEBUG nova.compute.manager [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62627) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1092.448685] env[62627]: DEBUG nova.compute.manager [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 0ee04bf9-6aa6-485c-a979-191d52af236d] Start spawning the instance on the hypervisor. 
{{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1092.478206] env[62627]: DEBUG nova.virt.hardware [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1092.478497] env[62627]: DEBUG nova.virt.hardware [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1092.478699] env[62627]: DEBUG nova.virt.hardware [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1092.478899] env[62627]: DEBUG nova.virt.hardware [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1092.479082] env[62627]: DEBUG nova.virt.hardware [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1092.479328] env[62627]: DEBUG nova.virt.hardware [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1092.479576] env[62627]: DEBUG nova.virt.hardware [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1092.479751] env[62627]: DEBUG nova.virt.hardware [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1092.479924] env[62627]: DEBUG nova.virt.hardware [None 
req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1092.480110] env[62627]: DEBUG nova.virt.hardware [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1092.480324] env[62627]: DEBUG nova.virt.hardware [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1092.481372] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03a267bb-2a77-4e61-a119-1d3171bef2ef {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.490899] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9f37b4a-efa6-4e0f-abaa-92b538ce6554 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.509669] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6c29496a-43b0-4a80-bc7e-387e9bc3e96c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Acquiring lock "457e5aa6-d229-4f48-8817-e918ec4bd0f9" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1092.509902] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6c29496a-43b0-4a80-bc7e-387e9bc3e96c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Lock "457e5aa6-d229-4f48-8817-e918ec4bd0f9" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1092.510217] env[62627]: INFO nova.compute.manager [None req-6c29496a-43b0-4a80-bc7e-387e9bc3e96c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Attaching volume 05c71695-2066-4611-ae96-3ddb34d2b201 to /dev/sdb [ 1092.547382] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdaa1c86-2f91-4a5c-9f7c-e3e8f12ee79e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.555374] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e933ad8c-ce3b-45b3-8715-e573aa3434d4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.572120] env[62627]: DEBUG nova.virt.block_device [None req-6c29496a-43b0-4a80-bc7e-387e9bc3e96c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 
457e5aa6-d229-4f48-8817-e918ec4bd0f9] Updating existing volume attachment record: 3c01df41-ac39-4b76-910b-ec22500f6c0e {{(pid=62627) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1092.645632] env[62627]: DEBUG oslo_vmware.api [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2194913, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.650153] env[62627]: DEBUG oslo_concurrency.lockutils [None req-68ee26e6-8bc4-42b7-99d1-d20d84e12aca tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1092.650453] env[62627]: DEBUG oslo_concurrency.lockutils [None req-68ee26e6-8bc4-42b7-99d1-d20d84e12aca tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1092.650721] env[62627]: DEBUG nova.objects.instance [None req-68ee26e6-8bc4-42b7-99d1-d20d84e12aca tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Lazy-loading 'resources' on Instance uuid 61f54696-1ce2-4515-ab9c-72347dbd9de0 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1092.711564] env[62627]: DEBUG nova.compute.manager [req-77e6cf6a-7714-4b3c-99dd-640bdfaffcf7 req-56133f12-b334-4a99-826f-e0a0e159bb60 service nova] [instance: 0ee04bf9-6aa6-485c-a979-191d52af236d] Received event network-vif-plugged-83a5bd04-2ff4-4441-aa87-768435659d4d {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1092.711778] env[62627]: DEBUG oslo_concurrency.lockutils [req-77e6cf6a-7714-4b3c-99dd-640bdfaffcf7 req-56133f12-b334-4a99-826f-e0a0e159bb60 service nova] Acquiring lock "0ee04bf9-6aa6-485c-a979-191d52af236d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1092.712145] env[62627]: DEBUG oslo_concurrency.lockutils [req-77e6cf6a-7714-4b3c-99dd-640bdfaffcf7 req-56133f12-b334-4a99-826f-e0a0e159bb60 service nova] Lock "0ee04bf9-6aa6-485c-a979-191d52af236d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1092.712196] env[62627]: DEBUG oslo_concurrency.lockutils [req-77e6cf6a-7714-4b3c-99dd-640bdfaffcf7 req-56133f12-b334-4a99-826f-e0a0e159bb60 service nova] Lock "0ee04bf9-6aa6-485c-a979-191d52af236d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1092.714073] env[62627]: DEBUG nova.compute.manager [req-77e6cf6a-7714-4b3c-99dd-640bdfaffcf7 req-56133f12-b334-4a99-826f-e0a0e159bb60 service nova] [instance: 
0ee04bf9-6aa6-485c-a979-191d52af236d] No waiting events found dispatching network-vif-plugged-83a5bd04-2ff4-4441-aa87-768435659d4d {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1092.714073] env[62627]: WARNING nova.compute.manager [req-77e6cf6a-7714-4b3c-99dd-640bdfaffcf7 req-56133f12-b334-4a99-826f-e0a0e159bb60 service nova] [instance: 0ee04bf9-6aa6-485c-a979-191d52af236d] Received unexpected event network-vif-plugged-83a5bd04-2ff4-4441-aa87-768435659d4d for instance with vm_state building and task_state spawning. [ 1092.744228] env[62627]: INFO nova.compute.manager [-] [instance: aca062be-755d-4e41-b9ed-f28d2d3bf598] Took 1.32 seconds to deallocate network for instance. [ 1092.794027] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1092.794488] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ca57a67e-a049-4722-ad0a-88fb9adab449 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.801102] env[62627]: DEBUG oslo_vmware.api [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1092.801102] env[62627]: value = "task-2194915" [ 1092.801102] env[62627]: _type = "Task" [ 1092.801102] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.808870] env[62627]: DEBUG oslo_vmware.api [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2194915, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.833132] env[62627]: DEBUG nova.network.neutron [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 0ee04bf9-6aa6-485c-a979-191d52af236d] Successfully updated port: 83a5bd04-2ff4-4441-aa87-768435659d4d {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1093.141093] env[62627]: DEBUG oslo_vmware.api [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2194913, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.627576} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.141439] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore1] 78307469-ebbe-45bc-a146-eaa298e9b707/78307469-ebbe-45bc-a146-eaa298e9b707.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1093.141516] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1093.141802] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-58ced28e-2da9-4b72-b9a2-6b4a25ada572 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.148445] env[62627]: DEBUG oslo_vmware.api [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1093.148445] env[62627]: value = "task-2194918" [ 1093.148445] env[62627]: _type = "Task" [ 1093.148445] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.159449] env[62627]: DEBUG oslo_vmware.api [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2194918, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.191499] env[62627]: DEBUG nova.compute.manager [req-4178607c-db34-49ce-92e6-199599633ccc req-7f52aa4a-f5de-42ce-8466-58869bd16166 service nova] [instance: aca062be-755d-4e41-b9ed-f28d2d3bf598] Received event network-vif-deleted-a3ecaa14-2705-45f1-a66a-eb481153f099 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1093.250457] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0728787c-a739-4271-97c1-9268e19b25bd tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1093.311028] env[62627]: DEBUG oslo_vmware.api [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2194915, 'name': PowerOffVM_Task, 'duration_secs': 0.175846} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.313627] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1093.313829] env[62627]: DEBUG nova.compute.manager [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1093.314844] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3fec2f5-7f45-4ce4-ae08-d475b13cf7b3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.339261] env[62627]: DEBUG oslo_concurrency.lockutils [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquiring lock "refresh_cache-0ee04bf9-6aa6-485c-a979-191d52af236d" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1093.339478] env[62627]: DEBUG oslo_concurrency.lockutils [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquired lock "refresh_cache-0ee04bf9-6aa6-485c-a979-191d52af236d" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1093.339650] env[62627]: DEBUG nova.network.neutron [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 0ee04bf9-6aa6-485c-a979-191d52af236d] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1093.385895] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2767b426-5494-41fb-a33b-3f85b8b7d389 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.394408] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd625b07-3307-46ae-940a-7ab4a72f1522 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.425752] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4147d8f9-3d1d-4152-85f4-6e50dd915eb8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.433349] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef791ed3-0fc1-43a7-ac0e-10bf786d0908 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.446400] env[62627]: DEBUG nova.compute.provider_tree [None req-68ee26e6-8bc4-42b7-99d1-d20d84e12aca tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Inventory has not changed in 
ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1093.658243] env[62627]: DEBUG oslo_vmware.api [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2194918, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071648} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.658441] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1093.659263] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11cc2c28-833c-4051-9b34-91d2865d1d79 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.680582] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Reconfiguring VM instance instance-00000051 to attach disk [datastore1] 78307469-ebbe-45bc-a146-eaa298e9b707/78307469-ebbe-45bc-a146-eaa298e9b707.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1093.680841] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4dbd880d-e9df-4c8b-8288-5da3d29b0318 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.699892] env[62627]: DEBUG oslo_vmware.api [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1093.699892] env[62627]: value = "task-2194919" [ 1093.699892] env[62627]: _type = "Task" [ 1093.699892] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.707523] env[62627]: DEBUG oslo_vmware.api [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2194919, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.827055] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "e71794b9-22eb-4643-8587-f12a198024a4" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.546s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1093.872885] env[62627]: DEBUG nova.network.neutron [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 0ee04bf9-6aa6-485c-a979-191d52af236d] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1093.949841] env[62627]: DEBUG nova.scheduler.client.report [None req-68ee26e6-8bc4-42b7-99d1-d20d84e12aca tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1094.054658] env[62627]: DEBUG nova.network.neutron [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 0ee04bf9-6aa6-485c-a979-191d52af236d] Updating instance_info_cache with network_info: [{"id": "83a5bd04-2ff4-4441-aa87-768435659d4d", "address": "fa:16:3e:f9:0b:4a", "network": {"id": "7c6d5a35-da58-4a44-83d0-20feb994e5ab", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-709440062-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c9263b20f714274a55b88a8bdbd5251", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "99be9a5e-b3f9-4e6c-83d5-df11f817847d", "external-id": "nsx-vlan-transportzone-566", "segmentation_id": 566, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83a5bd04-2f", "ovs_interfaceid": "83a5bd04-2ff4-4441-aa87-768435659d4d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1094.210346] env[62627]: DEBUG oslo_vmware.api [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2194919, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.335864] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1094.456012] env[62627]: DEBUG oslo_concurrency.lockutils [None req-68ee26e6-8bc4-42b7-99d1-d20d84e12aca tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.805s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1094.458436] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0728787c-a739-4271-97c1-9268e19b25bd tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.208s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1094.458705] env[62627]: DEBUG nova.objects.instance [None req-0728787c-a739-4271-97c1-9268e19b25bd tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Lazy-loading 'resources' on Instance uuid aca062be-755d-4e41-b9ed-f28d2d3bf598 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1094.481176] env[62627]: INFO nova.scheduler.client.report [None req-68ee26e6-8bc4-42b7-99d1-d20d84e12aca tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Deleted allocations for instance 61f54696-1ce2-4515-ab9c-72347dbd9de0 [ 1094.550472] env[62627]: DEBUG oslo_concurrency.lockutils [None req-552c8601-5415-4ee1-a9a5-e69b13eacbb5 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquiring lock "e71794b9-22eb-4643-8587-f12a198024a4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1094.550729] env[62627]: DEBUG oslo_concurrency.lockutils [None req-552c8601-5415-4ee1-a9a5-e69b13eacbb5 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "e71794b9-22eb-4643-8587-f12a198024a4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1094.551565] env[62627]: DEBUG oslo_concurrency.lockutils [None req-552c8601-5415-4ee1-a9a5-e69b13eacbb5 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquiring lock "e71794b9-22eb-4643-8587-f12a198024a4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1094.551565] env[62627]: DEBUG oslo_concurrency.lockutils [None req-552c8601-5415-4ee1-a9a5-e69b13eacbb5 tempest-ServerActionsTestOtherA-1289442600 
tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "e71794b9-22eb-4643-8587-f12a198024a4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1094.551565] env[62627]: DEBUG oslo_concurrency.lockutils [None req-552c8601-5415-4ee1-a9a5-e69b13eacbb5 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "e71794b9-22eb-4643-8587-f12a198024a4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1094.553340] env[62627]: INFO nova.compute.manager [None req-552c8601-5415-4ee1-a9a5-e69b13eacbb5 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Terminating instance [ 1094.556747] env[62627]: DEBUG oslo_concurrency.lockutils [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Releasing lock "refresh_cache-0ee04bf9-6aa6-485c-a979-191d52af236d" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1094.557015] env[62627]: DEBUG nova.compute.manager [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 0ee04bf9-6aa6-485c-a979-191d52af236d] Instance network_info: |[{"id": "83a5bd04-2ff4-4441-aa87-768435659d4d", "address": "fa:16:3e:f9:0b:4a", "network": {"id": "7c6d5a35-da58-4a44-83d0-20feb994e5ab", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-709440062-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c9263b20f714274a55b88a8bdbd5251", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "99be9a5e-b3f9-4e6c-83d5-df11f817847d", "external-id": "nsx-vlan-transportzone-566", "segmentation_id": 566, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83a5bd04-2f", "ovs_interfaceid": "83a5bd04-2ff4-4441-aa87-768435659d4d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1094.557608] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 0ee04bf9-6aa6-485c-a979-191d52af236d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f9:0b:4a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '99be9a5e-b3f9-4e6c-83d5-df11f817847d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '83a5bd04-2ff4-4441-aa87-768435659d4d', 'vif_model': 
'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1094.565037] env[62627]: DEBUG oslo.service.loopingcall [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1094.565676] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0ee04bf9-6aa6-485c-a979-191d52af236d] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1094.566274] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a15398e1-beb9-4dbf-a48c-97949f714d45 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.587809] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1094.587809] env[62627]: value = "task-2194920" [ 1094.587809] env[62627]: _type = "Task" [ 1094.587809] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.595775] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194920, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.710404] env[62627]: DEBUG oslo_vmware.api [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2194919, 'name': ReconfigVM_Task, 'duration_secs': 0.676463} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.710998] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Reconfigured VM instance instance-00000051 to attach disk [datastore1] 78307469-ebbe-45bc-a146-eaa298e9b707/78307469-ebbe-45bc-a146-eaa298e9b707.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1094.711641] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-afd28b4e-18f0-42ba-9092-90401c59d55a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.717950] env[62627]: DEBUG oslo_vmware.api [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1094.717950] env[62627]: value = "task-2194921" [ 1094.717950] env[62627]: _type = "Task" [ 1094.717950] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.725522] env[62627]: DEBUG oslo_vmware.api [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2194921, 'name': Rename_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.737516] env[62627]: DEBUG nova.compute.manager [req-933ccc6e-53a3-4b57-9fd5-134eb3d21fc6 req-4e4c88d1-566d-4c44-bf45-5360743a1b70 service nova] [instance: 0ee04bf9-6aa6-485c-a979-191d52af236d] Received event network-changed-83a5bd04-2ff4-4441-aa87-768435659d4d {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1094.738254] env[62627]: DEBUG nova.compute.manager [req-933ccc6e-53a3-4b57-9fd5-134eb3d21fc6 req-4e4c88d1-566d-4c44-bf45-5360743a1b70 service nova] [instance: 0ee04bf9-6aa6-485c-a979-191d52af236d] Refreshing instance network info cache due to event network-changed-83a5bd04-2ff4-4441-aa87-768435659d4d. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1094.738254] env[62627]: DEBUG oslo_concurrency.lockutils [req-933ccc6e-53a3-4b57-9fd5-134eb3d21fc6 req-4e4c88d1-566d-4c44-bf45-5360743a1b70 service nova] Acquiring lock "refresh_cache-0ee04bf9-6aa6-485c-a979-191d52af236d" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1094.738355] env[62627]: DEBUG oslo_concurrency.lockutils [req-933ccc6e-53a3-4b57-9fd5-134eb3d21fc6 req-4e4c88d1-566d-4c44-bf45-5360743a1b70 service nova] Acquired lock "refresh_cache-0ee04bf9-6aa6-485c-a979-191d52af236d" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1094.738629] env[62627]: DEBUG nova.network.neutron [req-933ccc6e-53a3-4b57-9fd5-134eb3d21fc6 req-4e4c88d1-566d-4c44-bf45-5360743a1b70 service nova] [instance: 0ee04bf9-6aa6-485c-a979-191d52af236d] Refreshing network info cache for port 83a5bd04-2ff4-4441-aa87-768435659d4d {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1094.988496] env[62627]: DEBUG oslo_concurrency.lockutils [None req-68ee26e6-8bc4-42b7-99d1-d20d84e12aca tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Lock "61f54696-1ce2-4515-ab9c-72347dbd9de0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.688s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1095.057191] env[62627]: DEBUG nova.compute.manager [None req-552c8601-5415-4ee1-a9a5-e69b13eacbb5 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Start destroying the instance on the hypervisor. 
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1095.057448] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-552c8601-5415-4ee1-a9a5-e69b13eacbb5 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1095.058364] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39dd42b4-8b16-48c2-836f-c6ae45dfc322 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.066359] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-552c8601-5415-4ee1-a9a5-e69b13eacbb5 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1095.066608] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-43527725-ff0e-49f3-9ce0-2b169d77628a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.100710] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194920, 'name': CreateVM_Task, 'duration_secs': 0.269838} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.100710] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0ee04bf9-6aa6-485c-a979-191d52af236d] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1095.101033] env[62627]: DEBUG oslo_concurrency.lockutils [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1095.101173] env[62627]: DEBUG oslo_concurrency.lockutils [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1095.101475] env[62627]: DEBUG oslo_concurrency.lockutils [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1095.103865] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0071cfe5-02c4-4039-8542-182898b85f40 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.108938] env[62627]: DEBUG oslo_vmware.api [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 1095.108938] env[62627]: 
value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]523f7c44-4cd9-0135-63fe-48f409dad928" [ 1095.108938] env[62627]: _type = "Task" [ 1095.108938] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.116886] env[62627]: DEBUG oslo_vmware.api [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]523f7c44-4cd9-0135-63fe-48f409dad928, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.122717] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-552c8601-5415-4ee1-a9a5-e69b13eacbb5 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1095.123297] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-552c8601-5415-4ee1-a9a5-e69b13eacbb5 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1095.123297] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-552c8601-5415-4ee1-a9a5-e69b13eacbb5 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Deleting the datastore file [datastore2] e71794b9-22eb-4643-8587-f12a198024a4 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1095.123297] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d9dafc2e-15d3-4202-ad58-5e12b64f3146 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.130613] env[62627]: DEBUG oslo_vmware.api [None req-552c8601-5415-4ee1-a9a5-e69b13eacbb5 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1095.130613] env[62627]: value = "task-2194924" [ 1095.130613] env[62627]: _type = "Task" [ 1095.130613] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.140846] env[62627]: DEBUG oslo_vmware.api [None req-552c8601-5415-4ee1-a9a5-e69b13eacbb5 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2194924, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.188627] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11fdb870-6085-49a9-aa5f-7d360cd14fb1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.195883] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f23b908e-4d04-4053-b4dc-11f382cf0abe {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.230242] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82474863-5f3e-43ea-b738-35713a5df792 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.241060] env[62627]: DEBUG oslo_vmware.api [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2194921, 'name': Rename_Task, 'duration_secs': 0.140726} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.243471] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1095.244458] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae77c5b4-c3fd-4d0e-97f9-1d7b05f56812 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.248080] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0c1b0de1-df00-499c-bc6d-0ad4e9547aff {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.259347] env[62627]: DEBUG nova.compute.provider_tree [None req-0728787c-a739-4271-97c1-9268e19b25bd tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1095.264098] env[62627]: DEBUG oslo_vmware.api [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1095.264098] env[62627]: value = "task-2194925" [ 1095.264098] env[62627]: _type = "Task" [ 1095.264098] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.271251] env[62627]: DEBUG oslo_vmware.api [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2194925, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.469539] env[62627]: DEBUG nova.network.neutron [req-933ccc6e-53a3-4b57-9fd5-134eb3d21fc6 req-4e4c88d1-566d-4c44-bf45-5360743a1b70 service nova] [instance: 0ee04bf9-6aa6-485c-a979-191d52af236d] Updated VIF entry in instance network info cache for port 83a5bd04-2ff4-4441-aa87-768435659d4d. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1095.469885] env[62627]: DEBUG nova.network.neutron [req-933ccc6e-53a3-4b57-9fd5-134eb3d21fc6 req-4e4c88d1-566d-4c44-bf45-5360743a1b70 service nova] [instance: 0ee04bf9-6aa6-485c-a979-191d52af236d] Updating instance_info_cache with network_info: [{"id": "83a5bd04-2ff4-4441-aa87-768435659d4d", "address": "fa:16:3e:f9:0b:4a", "network": {"id": "7c6d5a35-da58-4a44-83d0-20feb994e5ab", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-709440062-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c9263b20f714274a55b88a8bdbd5251", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "99be9a5e-b3f9-4e6c-83d5-df11f817847d", "external-id": "nsx-vlan-transportzone-566", "segmentation_id": 566, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83a5bd04-2f", "ovs_interfaceid": "83a5bd04-2ff4-4441-aa87-768435659d4d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1095.471276] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0f753cbb-8047-4ac0-a4e1-0001f8b7b4ea tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Acquiring lock "513c755a-424a-4e47-887e-7b315ad2e583" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1095.471276] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0f753cbb-8047-4ac0-a4e1-0001f8b7b4ea tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Lock "513c755a-424a-4e47-887e-7b315ad2e583" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1095.471463] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0f753cbb-8047-4ac0-a4e1-0001f8b7b4ea tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Acquiring lock "513c755a-424a-4e47-887e-7b315ad2e583-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1095.471606] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0f753cbb-8047-4ac0-a4e1-0001f8b7b4ea tempest-ServersTestFqdnHostnames-27247144 
tempest-ServersTestFqdnHostnames-27247144-project-member] Lock "513c755a-424a-4e47-887e-7b315ad2e583-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1095.471771] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0f753cbb-8047-4ac0-a4e1-0001f8b7b4ea tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Lock "513c755a-424a-4e47-887e-7b315ad2e583-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1095.474048] env[62627]: INFO nova.compute.manager [None req-0f753cbb-8047-4ac0-a4e1-0001f8b7b4ea tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] [instance: 513c755a-424a-4e47-887e-7b315ad2e583] Terminating instance [ 1095.621233] env[62627]: DEBUG oslo_vmware.api [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]523f7c44-4cd9-0135-63fe-48f409dad928, 'name': SearchDatastore_Task, 'duration_secs': 0.020032} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.621538] env[62627]: DEBUG oslo_concurrency.lockutils [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1095.621774] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 0ee04bf9-6aa6-485c-a979-191d52af236d] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1095.622016] env[62627]: DEBUG oslo_concurrency.lockutils [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1095.622171] env[62627]: DEBUG oslo_concurrency.lockutils [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1095.622353] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1095.622618] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8f58d5ec-344d-42f7-b302-dac4a924254e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.635188] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1095.635353] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1095.636052] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-10e8c005-358b-40db-9ec1-2970682eedcf {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.642307] env[62627]: DEBUG oslo_vmware.api [None req-552c8601-5415-4ee1-a9a5-e69b13eacbb5 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2194924, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.122998} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.643389] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-552c8601-5415-4ee1-a9a5-e69b13eacbb5 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1095.643581] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-552c8601-5415-4ee1-a9a5-e69b13eacbb5 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1095.643754] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-552c8601-5415-4ee1-a9a5-e69b13eacbb5 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1095.643931] env[62627]: INFO nova.compute.manager [None req-552c8601-5415-4ee1-a9a5-e69b13eacbb5 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Took 0.59 seconds to destroy the instance on the hypervisor. [ 1095.644196] env[62627]: DEBUG oslo.service.loopingcall [None req-552c8601-5415-4ee1-a9a5-e69b13eacbb5 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
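The "Waiting for function ... _deallocate_network_with_retries to return" record above is emitted from oslo.service's looping-call module. As a self-contained illustration of that module's basic pattern, the sketch below uses FixedIntervalLoopingCall with a stub; `try_deallocate` is a hypothetical placeholder, and this is not the exact retry wrapper Nova applies to `_deallocate_network_with_retries`.

```python
# Stand-alone example of the oslo.service looping-call pattern behind the
# "Waiting for function ... to return" records. Only the loopingcall usage is
# real API; try_deallocate() is a hypothetical stub.
from oslo_service import loopingcall

state = {'attempts': 0}

def try_deallocate():
    state['attempts'] += 1
    return state['attempts'] >= 3            # pretend the call succeeds on the third try

def _deallocate_with_retries():
    if try_deallocate():
        raise loopingcall.LoopingCallDone()  # stops the loop and unblocks wait()

timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
timer.start(interval=0.1).wait()             # blocks until LoopingCallDone is raised
```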
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1095.644472] env[62627]: DEBUG oslo_vmware.api [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 1095.644472] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52f86a20-c093-4390-35dc-75171b47b640" [ 1095.644472] env[62627]: _type = "Task" [ 1095.644472] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.644652] env[62627]: DEBUG nova.compute.manager [-] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1095.644762] env[62627]: DEBUG nova.network.neutron [-] [instance: e71794b9-22eb-4643-8587-f12a198024a4] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1095.653875] env[62627]: DEBUG oslo_vmware.api [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52f86a20-c093-4390-35dc-75171b47b640, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.742165] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Didn't find any instances for network info cache update. {{(pid=62627) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10531}} [ 1095.742403] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1095.742948] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1095.743204] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1095.743303] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1095.743429] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1095.743577] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62627) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1095.743709] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62627) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 1095.743970] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1095.764033] env[62627]: DEBUG nova.scheduler.client.report [None req-0728787c-a739-4271-97c1-9268e19b25bd tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1095.776651] env[62627]: DEBUG oslo_vmware.api [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2194925, 'name': PowerOnVM_Task, 'duration_secs': 0.438396} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.777538] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1095.777538] env[62627]: DEBUG nova.compute.manager [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1095.777929] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd4ba0c3-d2c7-44ab-b9a0-28398f076c7c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.959752] env[62627]: DEBUG oslo_concurrency.lockutils [None req-dcf62b44-4998-476e-a5df-d8d19a25c78f tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Acquiring lock "e5a1fa80-6579-4334-9705-33a95dbc3797" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1095.960058] env[62627]: DEBUG oslo_concurrency.lockutils [None req-dcf62b44-4998-476e-a5df-d8d19a25c78f tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Lock "e5a1fa80-6579-4334-9705-33a95dbc3797" acquired by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1095.960293] env[62627]: DEBUG oslo_concurrency.lockutils [None req-dcf62b44-4998-476e-a5df-d8d19a25c78f tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Acquiring lock "e5a1fa80-6579-4334-9705-33a95dbc3797-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1095.960508] env[62627]: DEBUG oslo_concurrency.lockutils [None req-dcf62b44-4998-476e-a5df-d8d19a25c78f tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Lock "e5a1fa80-6579-4334-9705-33a95dbc3797-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1095.960691] env[62627]: DEBUG oslo_concurrency.lockutils [None req-dcf62b44-4998-476e-a5df-d8d19a25c78f tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Lock "e5a1fa80-6579-4334-9705-33a95dbc3797-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1095.963087] env[62627]: INFO nova.compute.manager [None req-dcf62b44-4998-476e-a5df-d8d19a25c78f tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: e5a1fa80-6579-4334-9705-33a95dbc3797] Terminating instance [ 1095.974386] env[62627]: DEBUG oslo_concurrency.lockutils [req-933ccc6e-53a3-4b57-9fd5-134eb3d21fc6 req-4e4c88d1-566d-4c44-bf45-5360743a1b70 service nova] Releasing lock "refresh_cache-0ee04bf9-6aa6-485c-a979-191d52af236d" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1095.977866] env[62627]: DEBUG nova.compute.manager [None req-0f753cbb-8047-4ac0-a4e1-0001f8b7b4ea tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] [instance: 513c755a-424a-4e47-887e-7b315ad2e583] Start destroying the instance on the hypervisor. 
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1095.978131] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-0f753cbb-8047-4ac0-a4e1-0001f8b7b4ea tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] [instance: 513c755a-424a-4e47-887e-7b315ad2e583] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1095.979393] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c78e048d-bd7f-4bde-8a3c-4f97b3a8124e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.988252] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f753cbb-8047-4ac0-a4e1-0001f8b7b4ea tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] [instance: 513c755a-424a-4e47-887e-7b315ad2e583] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1095.988381] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-03a8549e-2ac7-433b-be52-7f227c743da9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.994627] env[62627]: DEBUG oslo_vmware.api [None req-0f753cbb-8047-4ac0-a4e1-0001f8b7b4ea tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Waiting for the task: (returnval){ [ 1095.994627] env[62627]: value = "task-2194926" [ 1095.994627] env[62627]: _type = "Task" [ 1095.994627] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.002140] env[62627]: DEBUG oslo_vmware.api [None req-0f753cbb-8047-4ac0-a4e1-0001f8b7b4ea tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Task: {'id': task-2194926, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.155867] env[62627]: DEBUG oslo_vmware.api [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52f86a20-c093-4390-35dc-75171b47b640, 'name': SearchDatastore_Task, 'duration_secs': 0.016184} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.156748] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ffafb5b-94b3-45ab-b1a1-0f2ada1e4511 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.163289] env[62627]: DEBUG oslo_vmware.api [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 1096.163289] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]529b4abb-0826-80d3-7cd5-75282ff0bc13" [ 1096.163289] env[62627]: _type = "Task" [ 1096.163289] env[62627]: } to complete. 
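The do_terminate_instance records in this stretch show the per-instance locking convention: one lock named after the instance UUID, plus a "<uuid>-events" lock held only long enough to clear pending external events. Below is a minimal sketch of that pattern with oslo.concurrency; the two helper functions are hypothetical placeholders.

```python
# Minimal sketch of the per-instance lock pattern seen in the
# do_terminate_instance records. oslo.concurrency is the real API;
# the helpers are hypothetical stubs.
from oslo_concurrency import lockutils

def clear_events_for_instance(instance_uuid):
    pass                                                 # placeholder

def shutdown_and_delete(instance_uuid):
    pass                                                 # placeholder

def do_terminate_instance(instance_uuid):
    with lockutils.lock(instance_uuid):                  # serialize all operations on this instance
        with lockutils.lock(f"{instance_uuid}-events"):  # held briefly just to clear queued events
            clear_events_for_instance(instance_uuid)
        shutdown_and_delete(instance_uuid)
```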
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.176946] env[62627]: DEBUG oslo_vmware.api [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]529b4abb-0826-80d3-7cd5-75282ff0bc13, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.246513] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1096.272406] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0728787c-a739-4271-97c1-9268e19b25bd tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.814s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1096.274769] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 1.939s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1096.274962] env[62627]: DEBUG nova.objects.instance [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62627) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1096.294154] env[62627]: DEBUG oslo_concurrency.lockutils [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1096.295613] env[62627]: INFO nova.scheduler.client.report [None req-0728787c-a739-4271-97c1-9268e19b25bd tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Deleted allocations for instance aca062be-755d-4e41-b9ed-f28d2d3bf598 [ 1096.364821] env[62627]: DEBUG nova.network.neutron [-] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1096.467057] env[62627]: DEBUG nova.compute.manager [None req-dcf62b44-4998-476e-a5df-d8d19a25c78f tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: e5a1fa80-6579-4334-9705-33a95dbc3797] Start destroying the instance on the hypervisor. 
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1096.467318] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-dcf62b44-4998-476e-a5df-d8d19a25c78f tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: e5a1fa80-6579-4334-9705-33a95dbc3797] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1096.468243] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaaca5cd-0253-4e2e-83f5-a87815bad9ee {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.475953] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcf62b44-4998-476e-a5df-d8d19a25c78f tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: e5a1fa80-6579-4334-9705-33a95dbc3797] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1096.476210] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-41866922-a480-4a98-b141-1f436c1374db {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.482361] env[62627]: DEBUG oslo_vmware.api [None req-dcf62b44-4998-476e-a5df-d8d19a25c78f tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Waiting for the task: (returnval){ [ 1096.482361] env[62627]: value = "task-2194927" [ 1096.482361] env[62627]: _type = "Task" [ 1096.482361] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.489973] env[62627]: DEBUG oslo_vmware.api [None req-dcf62b44-4998-476e-a5df-d8d19a25c78f tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': task-2194927, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.503087] env[62627]: DEBUG oslo_vmware.api [None req-0f753cbb-8047-4ac0-a4e1-0001f8b7b4ea tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Task: {'id': task-2194926, 'name': PowerOffVM_Task, 'duration_secs': 0.218758} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.503347] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f753cbb-8047-4ac0-a4e1-0001f8b7b4ea tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] [instance: 513c755a-424a-4e47-887e-7b315ad2e583] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1096.503535] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-0f753cbb-8047-4ac0-a4e1-0001f8b7b4ea tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] [instance: 513c755a-424a-4e47-887e-7b315ad2e583] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1096.503800] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1851baa7-4361-41b1-a8e2-b954b9f6f1b7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.569497] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-0f753cbb-8047-4ac0-a4e1-0001f8b7b4ea tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] [instance: 513c755a-424a-4e47-887e-7b315ad2e583] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1096.569726] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-0f753cbb-8047-4ac0-a4e1-0001f8b7b4ea tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] [instance: 513c755a-424a-4e47-887e-7b315ad2e583] Deleting contents of the VM from datastore datastore1 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1096.569961] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f753cbb-8047-4ac0-a4e1-0001f8b7b4ea tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Deleting the datastore file [datastore1] 513c755a-424a-4e47-887e-7b315ad2e583 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1096.570279] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5ca6d86a-ef59-4160-873e-0da174b0b906 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.576826] env[62627]: DEBUG oslo_vmware.api [None req-0f753cbb-8047-4ac0-a4e1-0001f8b7b4ea tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Waiting for the task: (returnval){ [ 1096.576826] env[62627]: value = "task-2194929" [ 1096.576826] env[62627]: _type = "Task" [ 1096.576826] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.584887] env[62627]: DEBUG oslo_vmware.api [None req-0f753cbb-8047-4ac0-a4e1-0001f8b7b4ea tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Task: {'id': task-2194929, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.595511] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ebcb8a7d-435d-4640-a6ff-9951422d3f58 tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Acquiring lock "66882603-6ff6-49c2-9f42-3521ecaf98b2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1096.595844] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ebcb8a7d-435d-4640-a6ff-9951422d3f58 tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Lock "66882603-6ff6-49c2-9f42-3521ecaf98b2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1096.596151] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ebcb8a7d-435d-4640-a6ff-9951422d3f58 tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Acquiring lock "66882603-6ff6-49c2-9f42-3521ecaf98b2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1096.596503] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ebcb8a7d-435d-4640-a6ff-9951422d3f58 tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Lock "66882603-6ff6-49c2-9f42-3521ecaf98b2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1096.596781] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ebcb8a7d-435d-4640-a6ff-9951422d3f58 tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Lock "66882603-6ff6-49c2-9f42-3521ecaf98b2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1096.599378] env[62627]: INFO nova.compute.manager [None req-ebcb8a7d-435d-4640-a6ff-9951422d3f58 tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] [instance: 66882603-6ff6-49c2-9f42-3521ecaf98b2] Terminating instance [ 1096.674984] env[62627]: DEBUG oslo_vmware.api [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]529b4abb-0826-80d3-7cd5-75282ff0bc13, 'name': SearchDatastore_Task, 'duration_secs': 0.012465} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.674984] env[62627]: DEBUG oslo_concurrency.lockutils [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1096.674984] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 0ee04bf9-6aa6-485c-a979-191d52af236d/0ee04bf9-6aa6-485c-a979-191d52af236d.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1096.675207] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b3801d9f-86b1-48da-adba-ab6da50336ff {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.680863] env[62627]: DEBUG oslo_vmware.api [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 1096.680863] env[62627]: value = "task-2194930" [ 1096.680863] env[62627]: _type = "Task" [ 1096.680863] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.688729] env[62627]: DEBUG oslo_vmware.api [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194930, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.762739] env[62627]: DEBUG nova.compute.manager [req-32201073-e20e-43a2-bc5e-e521c065b4c5 req-e730f17c-e585-4e41-b52e-77f100fd209a service nova] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Received event network-vif-deleted-c77ae6ec-d1ea-4f56-b856-910a5835bc0d {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1096.801970] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0728787c-a739-4271-97c1-9268e19b25bd tempest-ImagesNegativeTestJSON-890659973 tempest-ImagesNegativeTestJSON-890659973-project-member] Lock "aca062be-755d-4e41-b9ed-f28d2d3bf598" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.026s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1096.867221] env[62627]: INFO nova.compute.manager [-] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Took 1.22 seconds to deallocate network for instance. [ 1096.994337] env[62627]: DEBUG oslo_vmware.api [None req-dcf62b44-4998-476e-a5df-d8d19a25c78f tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': task-2194927, 'name': PowerOffVM_Task, 'duration_secs': 0.23323} completed successfully. 
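The copy record above spells out the datastore path layout this driver uses: the image cache keeps one VMDK per image under devstack-image-cache_base, and each instance gets its own copy named after its UUID. A small helper pair reflecting just that layout is shown below; the function names are made up, only the path shapes come from the log.

```python
# Hypothetical helpers mirroring the datastore path layout in the
# CopyVirtualDisk record above; only the path shapes are taken from the log.
def cache_vmdk_path(datastore, image_id):
    return f"[{datastore}] devstack-image-cache_base/{image_id}/{image_id}.vmdk"

def instance_vmdk_path(datastore, instance_uuid):
    return f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"

src = cache_vmdk_path("datastore2", "d5497cf1-16e8-478a-a834-e9289b1751b7")
dst = instance_vmdk_path("datastore2", "0ee04bf9-6aa6-485c-a979-191d52af236d")
# CopyVirtualDisk_Task is then invoked with src as the source and dst as the
# destination; the copied root disk is extended for the instance afterwards.
```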
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.994705] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcf62b44-4998-476e-a5df-d8d19a25c78f tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: e5a1fa80-6579-4334-9705-33a95dbc3797] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1096.994974] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-dcf62b44-4998-476e-a5df-d8d19a25c78f tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: e5a1fa80-6579-4334-9705-33a95dbc3797] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1096.995335] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9fa597b0-6f54-42df-a786-87f562b2ac32 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.080972] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-dcf62b44-4998-476e-a5df-d8d19a25c78f tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: e5a1fa80-6579-4334-9705-33a95dbc3797] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1097.080972] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-dcf62b44-4998-476e-a5df-d8d19a25c78f tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: e5a1fa80-6579-4334-9705-33a95dbc3797] Deleting contents of the VM from datastore datastore1 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1097.080972] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-dcf62b44-4998-476e-a5df-d8d19a25c78f tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Deleting the datastore file [datastore1] e5a1fa80-6579-4334-9705-33a95dbc3797 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1097.091086] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-df4e8646-40ff-4eee-af96-a3421e2abe43 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.101012] env[62627]: DEBUG oslo_vmware.api [None req-0f753cbb-8047-4ac0-a4e1-0001f8b7b4ea tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Task: {'id': task-2194929, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.174988} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.101156] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f753cbb-8047-4ac0-a4e1-0001f8b7b4ea tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1097.101375] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-0f753cbb-8047-4ac0-a4e1-0001f8b7b4ea tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] [instance: 513c755a-424a-4e47-887e-7b315ad2e583] Deleted contents of the VM from datastore datastore1 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1097.101805] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-0f753cbb-8047-4ac0-a4e1-0001f8b7b4ea tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] [instance: 513c755a-424a-4e47-887e-7b315ad2e583] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1097.101805] env[62627]: INFO nova.compute.manager [None req-0f753cbb-8047-4ac0-a4e1-0001f8b7b4ea tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] [instance: 513c755a-424a-4e47-887e-7b315ad2e583] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1097.101982] env[62627]: DEBUG oslo.service.loopingcall [None req-0f753cbb-8047-4ac0-a4e1-0001f8b7b4ea tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1097.103527] env[62627]: DEBUG oslo_vmware.api [None req-dcf62b44-4998-476e-a5df-d8d19a25c78f tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Waiting for the task: (returnval){ [ 1097.103527] env[62627]: value = "task-2194932" [ 1097.103527] env[62627]: _type = "Task" [ 1097.103527] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.103527] env[62627]: DEBUG nova.compute.manager [-] [instance: 513c755a-424a-4e47-887e-7b315ad2e583] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1097.103527] env[62627]: DEBUG nova.network.neutron [-] [instance: 513c755a-424a-4e47-887e-7b315ad2e583] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1097.109135] env[62627]: DEBUG nova.compute.manager [None req-ebcb8a7d-435d-4640-a6ff-9951422d3f58 tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] [instance: 66882603-6ff6-49c2-9f42-3521ecaf98b2] Start destroying the instance on the hypervisor. 
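Taken together, the records for instance 513c755a show the destroy sequence this driver walks through: power off the VM, unregister it, delete its datastore directory, then hand the ports back to Neutron. A compressed, order-only sketch follows; every function in it is a hypothetical stub, and the log only establishes the ordering.

```python
# Order-only sketch of the destroy sequence visible in the 513c755a records;
# all helpers are hypothetical stubs.
def power_off_vm(uuid): ...                            # PowerOffVM_Task
def unregister_vm(uuid): ...                           # UnregisterVM
def delete_datastore_dir(ds, uuid): ...                # DeleteDatastoreFile_Task on "[ds] <uuid>"
def deallocate_network(uuid): ...                      # deallocate_for_instance() against Neutron

def destroy_instance(uuid, datastore):
    power_off_vm(uuid)
    unregister_vm(uuid)
    delete_datastore_dir(datastore, uuid)
    deallocate_network(uuid)
```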
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1097.109380] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ebcb8a7d-435d-4640-a6ff-9951422d3f58 tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] [instance: 66882603-6ff6-49c2-9f42-3521ecaf98b2] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1097.109760] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4dd10e3c-0fa9-4220-97c6-eab1eeee5905 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquiring lock "78307469-ebbe-45bc-a146-eaa298e9b707" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1097.109977] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4dd10e3c-0fa9-4220-97c6-eab1eeee5905 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "78307469-ebbe-45bc-a146-eaa298e9b707" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1097.110179] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4dd10e3c-0fa9-4220-97c6-eab1eeee5905 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquiring lock "78307469-ebbe-45bc-a146-eaa298e9b707-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1097.110361] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4dd10e3c-0fa9-4220-97c6-eab1eeee5905 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "78307469-ebbe-45bc-a146-eaa298e9b707-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1097.110547] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4dd10e3c-0fa9-4220-97c6-eab1eeee5905 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "78307469-ebbe-45bc-a146-eaa298e9b707-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1097.112686] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c6d4c1d-41a6-4634-b80c-7a91efe9756f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.116247] env[62627]: INFO nova.compute.manager [None req-4dd10e3c-0fa9-4220-97c6-eab1eeee5905 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Terminating instance [ 1097.121719] env[62627]: DEBUG oslo_vmware.api [None req-dcf62b44-4998-476e-a5df-d8d19a25c78f tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': task-2194932, 'name': 
DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.128252] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebcb8a7d-435d-4640-a6ff-9951422d3f58 tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] [instance: 66882603-6ff6-49c2-9f42-3521ecaf98b2] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1097.128252] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-52a60bc4-0cd3-4a91-a452-d51d6990c9e2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.132988] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c29496a-43b0-4a80-bc7e-387e9bc3e96c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Volume attach. Driver type: vmdk {{(pid=62627) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1097.133238] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c29496a-43b0-4a80-bc7e-387e9bc3e96c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447765', 'volume_id': '05c71695-2066-4611-ae96-3ddb34d2b201', 'name': 'volume-05c71695-2066-4611-ae96-3ddb34d2b201', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '457e5aa6-d229-4f48-8817-e918ec4bd0f9', 'attached_at': '', 'detached_at': '', 'volume_id': '05c71695-2066-4611-ae96-3ddb34d2b201', 'serial': '05c71695-2066-4611-ae96-3ddb34d2b201'} {{(pid=62627) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1097.134151] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af25438a-2a9c-4e6e-88c4-139302bf7a92 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.139392] env[62627]: DEBUG oslo_vmware.api [None req-ebcb8a7d-435d-4640-a6ff-9951422d3f58 tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Waiting for the task: (returnval){ [ 1097.139392] env[62627]: value = "task-2194933" [ 1097.139392] env[62627]: _type = "Task" [ 1097.139392] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.157710] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da7d859d-a6be-4bea-9028-5ee9538bda8d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.164754] env[62627]: DEBUG oslo_vmware.api [None req-ebcb8a7d-435d-4640-a6ff-9951422d3f58 tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Task: {'id': task-2194933, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.189307] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c29496a-43b0-4a80-bc7e-387e9bc3e96c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Reconfiguring VM instance instance-00000033 to attach disk [datastore2] volume-05c71695-2066-4611-ae96-3ddb34d2b201/volume-05c71695-2066-4611-ae96-3ddb34d2b201.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1097.196025] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bd52b779-3d6d-4f14-a8c9-eae478dcb33c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.212462] env[62627]: DEBUG oslo_vmware.api [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194930, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.488698} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.213850] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 0ee04bf9-6aa6-485c-a979-191d52af236d/0ee04bf9-6aa6-485c-a979-191d52af236d.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1097.214094] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 0ee04bf9-6aa6-485c-a979-191d52af236d] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1097.214537] env[62627]: DEBUG oslo_vmware.api [None req-6c29496a-43b0-4a80-bc7e-387e9bc3e96c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for the task: (returnval){ [ 1097.214537] env[62627]: value = "task-2194934" [ 1097.214537] env[62627]: _type = "Task" [ 1097.214537] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.214898] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8392a378-9072-41bc-abf9-40d7b5ceacd7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.225317] env[62627]: DEBUG oslo_vmware.api [None req-6c29496a-43b0-4a80-bc7e-387e9bc3e96c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194934, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.228135] env[62627]: DEBUG oslo_vmware.api [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 1097.228135] env[62627]: value = "task-2194935" [ 1097.228135] env[62627]: _type = "Task" [ 1097.228135] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.237279] env[62627]: DEBUG oslo_vmware.api [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194935, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.285263] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9caf9820-898e-46c8-a248-72f757e1f5a9 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.010s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1097.286890] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 1.040s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1097.287123] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1097.287311] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62627) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1097.287626] env[62627]: DEBUG oslo_concurrency.lockutils [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.994s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1097.287838] env[62627]: DEBUG nova.objects.instance [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62627) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1097.291851] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-093aca6b-4e5b-40cd-bc1a-5b98ad8a5cbb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.302395] 
env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6344f21e-bfe9-47c7-a89e-10de3837ed4b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.321876] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c902c256-8c75-4afb-8a9c-47b392d1db57 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.329595] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ee3f3e4-050a-4bf1-890e-0d3b5de9f2f3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.363606] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179030MB free_disk=176GB free_vcpus=48 pci_devices=None {{(pid=62627) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1097.363792] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1097.377227] env[62627]: DEBUG oslo_concurrency.lockutils [None req-552c8601-5415-4ee1-a9a5-e69b13eacbb5 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1097.543105] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquiring lock "9e0693d9-077a-4a02-b1c5-40fdddcdd27d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1097.543377] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "9e0693d9-077a-4a02-b1c5-40fdddcdd27d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1097.615202] env[62627]: DEBUG oslo_vmware.api [None req-dcf62b44-4998-476e-a5df-d8d19a25c78f tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Task: {'id': task-2194932, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.218729} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.615484] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-dcf62b44-4998-476e-a5df-d8d19a25c78f tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1097.615674] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-dcf62b44-4998-476e-a5df-d8d19a25c78f tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: e5a1fa80-6579-4334-9705-33a95dbc3797] Deleted contents of the VM from datastore datastore1 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1097.615852] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-dcf62b44-4998-476e-a5df-d8d19a25c78f tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: e5a1fa80-6579-4334-9705-33a95dbc3797] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1097.616035] env[62627]: INFO nova.compute.manager [None req-dcf62b44-4998-476e-a5df-d8d19a25c78f tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] [instance: e5a1fa80-6579-4334-9705-33a95dbc3797] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1097.616406] env[62627]: DEBUG oslo.service.loopingcall [None req-dcf62b44-4998-476e-a5df-d8d19a25c78f tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1097.616627] env[62627]: DEBUG nova.compute.manager [-] [instance: e5a1fa80-6579-4334-9705-33a95dbc3797] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1097.616734] env[62627]: DEBUG nova.network.neutron [-] [instance: e5a1fa80-6579-4334-9705-33a95dbc3797] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1097.624966] env[62627]: DEBUG nova.compute.manager [None req-4dd10e3c-0fa9-4220-97c6-eab1eeee5905 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Start destroying the instance on the hypervisor. 
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1097.625184] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-4dd10e3c-0fa9-4220-97c6-eab1eeee5905 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1097.626008] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1499db81-0411-46b4-adc4-fc96b10164d6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.633955] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-4dd10e3c-0fa9-4220-97c6-eab1eeee5905 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1097.634213] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b2fa1b7f-ec6d-4a73-b08b-4c915eef5364 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.640887] env[62627]: DEBUG oslo_vmware.api [None req-4dd10e3c-0fa9-4220-97c6-eab1eeee5905 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1097.640887] env[62627]: value = "task-2194936" [ 1097.640887] env[62627]: _type = "Task" [ 1097.640887] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.653351] env[62627]: DEBUG oslo_vmware.api [None req-ebcb8a7d-435d-4640-a6ff-9951422d3f58 tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Task: {'id': task-2194933, 'name': PowerOffVM_Task, 'duration_secs': 0.205181} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.657251] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebcb8a7d-435d-4640-a6ff-9951422d3f58 tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] [instance: 66882603-6ff6-49c2-9f42-3521ecaf98b2] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1097.657723] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ebcb8a7d-435d-4640-a6ff-9951422d3f58 tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] [instance: 66882603-6ff6-49c2-9f42-3521ecaf98b2] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1097.658000] env[62627]: DEBUG oslo_vmware.api [None req-4dd10e3c-0fa9-4220-97c6-eab1eeee5905 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2194936, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.658220] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3842f9ad-8e4f-446e-9116-eb5648ae8243 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.727327] env[62627]: DEBUG oslo_vmware.api [None req-6c29496a-43b0-4a80-bc7e-387e9bc3e96c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194934, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.729309] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ebcb8a7d-435d-4640-a6ff-9951422d3f58 tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] [instance: 66882603-6ff6-49c2-9f42-3521ecaf98b2] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1097.729532] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ebcb8a7d-435d-4640-a6ff-9951422d3f58 tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] [instance: 66882603-6ff6-49c2-9f42-3521ecaf98b2] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1097.729718] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-ebcb8a7d-435d-4640-a6ff-9951422d3f58 tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Deleting the datastore file [datastore2] 66882603-6ff6-49c2-9f42-3521ecaf98b2 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1097.733420] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b90b2ce4-5527-4d4b-a4c5-09d71e9b6ca3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.739674] env[62627]: DEBUG oslo_vmware.api [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194935, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069877} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.741114] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 0ee04bf9-6aa6-485c-a979-191d52af236d] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1097.741961] env[62627]: DEBUG oslo_vmware.api [None req-ebcb8a7d-435d-4640-a6ff-9951422d3f58 tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Waiting for the task: (returnval){ [ 1097.741961] env[62627]: value = "task-2194938" [ 1097.741961] env[62627]: _type = "Task" [ 1097.741961] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.742142] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8505ba66-2601-46a6-80a8-a81b97b28b37 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.772355] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 0ee04bf9-6aa6-485c-a979-191d52af236d] Reconfiguring VM instance instance-00000053 to attach disk [datastore2] 0ee04bf9-6aa6-485c-a979-191d52af236d/0ee04bf9-6aa6-485c-a979-191d52af236d.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1097.776147] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9f550130-a873-4b59-bcdd-0d3dab3765f9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.791700] env[62627]: DEBUG oslo_vmware.api [None req-ebcb8a7d-435d-4640-a6ff-9951422d3f58 tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Task: {'id': task-2194938, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.801618] env[62627]: DEBUG oslo_vmware.api [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 1097.801618] env[62627]: value = "task-2194939" [ 1097.801618] env[62627]: _type = "Task" [ 1097.801618] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.812473] env[62627]: DEBUG oslo_vmware.api [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194939, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.017129] env[62627]: DEBUG nova.compute.manager [req-8eeed524-6bd4-46a8-9450-2933f6d00567 req-f4fd0a6a-c205-4799-9cd0-d679b29cbbb9 service nova] [instance: e5a1fa80-6579-4334-9705-33a95dbc3797] Received event network-vif-deleted-6e993a7b-e7ba-4beb-ac82-cd2f2b111371 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1098.017346] env[62627]: INFO nova.compute.manager [req-8eeed524-6bd4-46a8-9450-2933f6d00567 req-f4fd0a6a-c205-4799-9cd0-d679b29cbbb9 service nova] [instance: e5a1fa80-6579-4334-9705-33a95dbc3797] Neutron deleted interface 6e993a7b-e7ba-4beb-ac82-cd2f2b111371; detaching it from the instance and deleting it from the info cache [ 1098.018056] env[62627]: DEBUG nova.network.neutron [req-8eeed524-6bd4-46a8-9450-2933f6d00567 req-f4fd0a6a-c205-4799-9cd0-d679b29cbbb9 service nova] [instance: e5a1fa80-6579-4334-9705-33a95dbc3797] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1098.045883] env[62627]: DEBUG nova.compute.manager [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1098.153625] env[62627]: DEBUG oslo_vmware.api [None req-4dd10e3c-0fa9-4220-97c6-eab1eeee5905 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2194936, 'name': PowerOffVM_Task, 'duration_secs': 0.371642} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.153893] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-4dd10e3c-0fa9-4220-97c6-eab1eeee5905 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1098.154078] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-4dd10e3c-0fa9-4220-97c6-eab1eeee5905 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1098.154391] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-53031cea-3e30-44b8-9ade-1b3e585c8d2c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.199017] env[62627]: DEBUG nova.network.neutron [-] [instance: 513c755a-424a-4e47-887e-7b315ad2e583] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1098.222116] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-4dd10e3c-0fa9-4220-97c6-eab1eeee5905 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1098.222549] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-4dd10e3c-0fa9-4220-97c6-eab1eeee5905 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Deleting contents of the VM from datastore datastore1 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1098.222603] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-4dd10e3c-0fa9-4220-97c6-eab1eeee5905 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Deleting the datastore file [datastore1] 78307469-ebbe-45bc-a146-eaa298e9b707 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1098.223255] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c5c4e290-e080-4c94-bfa9-790138049f9e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.229552] env[62627]: DEBUG oslo_vmware.api [None req-6c29496a-43b0-4a80-bc7e-387e9bc3e96c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194934, 'name': ReconfigVM_Task, 'duration_secs': 0.837099} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.229552] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c29496a-43b0-4a80-bc7e-387e9bc3e96c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Reconfigured VM instance instance-00000033 to attach disk [datastore2] volume-05c71695-2066-4611-ae96-3ddb34d2b201/volume-05c71695-2066-4611-ae96-3ddb34d2b201.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1098.235820] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a2416276-9693-448a-8620-451eb38bcc75 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.247131] env[62627]: DEBUG oslo_vmware.api [None req-4dd10e3c-0fa9-4220-97c6-eab1eeee5905 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1098.247131] env[62627]: value = "task-2194941" [ 1098.247131] env[62627]: _type = "Task" [ 1098.247131] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.257183] env[62627]: DEBUG oslo_vmware.api [None req-6c29496a-43b0-4a80-bc7e-387e9bc3e96c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for the task: (returnval){ [ 1098.257183] env[62627]: value = "task-2194942" [ 1098.257183] env[62627]: _type = "Task" [ 1098.257183] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.262995] env[62627]: DEBUG oslo_vmware.api [None req-ebcb8a7d-435d-4640-a6ff-9951422d3f58 tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Task: {'id': task-2194938, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.427963} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.263228] env[62627]: DEBUG oslo_vmware.api [None req-4dd10e3c-0fa9-4220-97c6-eab1eeee5905 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2194941, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.266234] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-ebcb8a7d-435d-4640-a6ff-9951422d3f58 tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1098.266590] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ebcb8a7d-435d-4640-a6ff-9951422d3f58 tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] [instance: 66882603-6ff6-49c2-9f42-3521ecaf98b2] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1098.266798] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ebcb8a7d-435d-4640-a6ff-9951422d3f58 tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] [instance: 66882603-6ff6-49c2-9f42-3521ecaf98b2] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1098.266968] env[62627]: INFO nova.compute.manager [None req-ebcb8a7d-435d-4640-a6ff-9951422d3f58 tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] [instance: 66882603-6ff6-49c2-9f42-3521ecaf98b2] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1098.267963] env[62627]: DEBUG oslo.service.loopingcall [None req-ebcb8a7d-435d-4640-a6ff-9951422d3f58 tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1098.267963] env[62627]: DEBUG nova.compute.manager [-] [instance: 66882603-6ff6-49c2-9f42-3521ecaf98b2] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1098.267963] env[62627]: DEBUG nova.network.neutron [-] [instance: 66882603-6ff6-49c2-9f42-3521ecaf98b2] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1098.276345] env[62627]: DEBUG oslo_vmware.api [None req-6c29496a-43b0-4a80-bc7e-387e9bc3e96c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194942, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.306027] env[62627]: DEBUG oslo_concurrency.lockutils [None req-aa8c9cab-94c7-46b7-8134-868592161775 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.016s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1098.306027] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.941s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1098.318937] env[62627]: DEBUG oslo_vmware.api [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194939, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.488557] env[62627]: DEBUG nova.network.neutron [-] [instance: e5a1fa80-6579-4334-9705-33a95dbc3797] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1098.523643] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bc7fa7ac-c263-4b3e-a810-04818e104a88 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.532543] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49e26ff2-6ed3-4d19-97fb-b70d329aa09c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.574963] env[62627]: DEBUG nova.compute.manager [req-8eeed524-6bd4-46a8-9450-2933f6d00567 req-f4fd0a6a-c205-4799-9cd0-d679b29cbbb9 service nova] [instance: e5a1fa80-6579-4334-9705-33a95dbc3797] Detach interface failed, port_id=6e993a7b-e7ba-4beb-ac82-cd2f2b111371, reason: Instance e5a1fa80-6579-4334-9705-33a95dbc3797 could not be found. {{(pid=62627) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1098.576550] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1098.702365] env[62627]: INFO nova.compute.manager [-] [instance: 513c755a-424a-4e47-887e-7b315ad2e583] Took 1.60 seconds to deallocate network for instance. [ 1098.758153] env[62627]: DEBUG oslo_vmware.api [None req-4dd10e3c-0fa9-4220-97c6-eab1eeee5905 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2194941, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.214235} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.758432] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-4dd10e3c-0fa9-4220-97c6-eab1eeee5905 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1098.758664] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-4dd10e3c-0fa9-4220-97c6-eab1eeee5905 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Deleted contents of the VM from datastore datastore1 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1098.758890] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-4dd10e3c-0fa9-4220-97c6-eab1eeee5905 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1098.759066] env[62627]: INFO nova.compute.manager [None req-4dd10e3c-0fa9-4220-97c6-eab1eeee5905 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1098.759310] env[62627]: DEBUG oslo.service.loopingcall [None req-4dd10e3c-0fa9-4220-97c6-eab1eeee5905 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1098.759659] env[62627]: DEBUG nova.compute.manager [-] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1098.759659] env[62627]: DEBUG nova.network.neutron [-] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1098.771883] env[62627]: DEBUG oslo_vmware.api [None req-6c29496a-43b0-4a80-bc7e-387e9bc3e96c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194942, 'name': ReconfigVM_Task, 'duration_secs': 0.153638} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.772195] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c29496a-43b0-4a80-bc7e-387e9bc3e96c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447765', 'volume_id': '05c71695-2066-4611-ae96-3ddb34d2b201', 'name': 'volume-05c71695-2066-4611-ae96-3ddb34d2b201', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '457e5aa6-d229-4f48-8817-e918ec4bd0f9', 'attached_at': '', 'detached_at': '', 'volume_id': '05c71695-2066-4611-ae96-3ddb34d2b201', 'serial': '05c71695-2066-4611-ae96-3ddb34d2b201'} {{(pid=62627) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1098.791581] env[62627]: DEBUG nova.compute.manager [req-4b9eb83f-1e5b-4f05-9329-a91540256f2f req-94a1d7c2-26e3-477e-a5c9-201985bae255 service nova] [instance: 513c755a-424a-4e47-887e-7b315ad2e583] Received event network-vif-deleted-861a18bd-3c64-44b6-9e6b-a78d9449b584 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1098.822012] env[62627]: DEBUG oslo_vmware.api [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194939, 'name': ReconfigVM_Task, 'duration_secs': 0.534972} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.824505] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 0ee04bf9-6aa6-485c-a979-191d52af236d] Reconfigured VM instance instance-00000053 to attach disk [datastore2] 0ee04bf9-6aa6-485c-a979-191d52af236d/0ee04bf9-6aa6-485c-a979-191d52af236d.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1098.825172] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f0dfb0fe-5ee9-442b-a328-0b5fb3ee060b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.832114] env[62627]: DEBUG oslo_vmware.api [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 1098.832114] env[62627]: value = "task-2194943" [ 1098.832114] env[62627]: _type = "Task" [ 1098.832114] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.841242] env[62627]: DEBUG oslo_vmware.api [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194943, 'name': Rename_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.995926] env[62627]: INFO nova.compute.manager [-] [instance: e5a1fa80-6579-4334-9705-33a95dbc3797] Took 1.38 seconds to deallocate network for instance. 
[ 1099.209162] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0f753cbb-8047-4ac0-a4e1-0001f8b7b4ea tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1099.343962] env[62627]: DEBUG oslo_vmware.api [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194943, 'name': Rename_Task, 'duration_secs': 0.168318} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.344252] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 0ee04bf9-6aa6-485c-a979-191d52af236d] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1099.345177] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 241311b0-ba81-41d1-a9b6-cd40d04dbe3c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1099.345323] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance aa181094-cb78-44e0-8d7a-1d677e0af1fe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1099.345447] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1099.345566] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 457e5aa6-d229-4f48-8817-e918ec4bd0f9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1099.345681] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 66882603-6ff6-49c2-9f42-3521ecaf98b2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1099.345796] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance ae4ec050-5062-4e57-8090-f67f8c8013ff actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1099.345916] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1099.346034] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 84b8381c-bdd4-462e-aa98-0cc972f00882 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1099.346152] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance f6357f01-d7cb-4477-be2d-98fc17d7a940 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1099.346266] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance e9d33611-e373-4303-937d-b43d893e51f5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1099.346690] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 513c755a-424a-4e47-887e-7b315ad2e583 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1099.346690] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance e5a1fa80-6579-4334-9705-33a95dbc3797 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1099.346690] env[62627]: WARNING nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance e71794b9-22eb-4643-8587-f12a198024a4 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1099.346841] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 78307469-ebbe-45bc-a146-eaa298e9b707 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1099.346841] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 0ee04bf9-6aa6-485c-a979-191d52af236d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1099.347982] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-df7b5760-f806-4cf4-877d-cf63421f91b7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.354232] env[62627]: DEBUG oslo_vmware.api [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 1099.354232] env[62627]: value = "task-2194944" [ 1099.354232] env[62627]: _type = "Task" [ 1099.354232] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.362388] env[62627]: DEBUG oslo_vmware.api [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194944, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.392819] env[62627]: DEBUG nova.network.neutron [-] [instance: 66882603-6ff6-49c2-9f42-3521ecaf98b2] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1099.503144] env[62627]: DEBUG oslo_concurrency.lockutils [None req-dcf62b44-4998-476e-a5df-d8d19a25c78f tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1099.583214] env[62627]: DEBUG nova.network.neutron [-] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1099.811197] env[62627]: DEBUG nova.objects.instance [None req-6c29496a-43b0-4a80-bc7e-387e9bc3e96c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Lazy-loading 'flavor' on Instance uuid 457e5aa6-d229-4f48-8817-e918ec4bd0f9 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1099.850455] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 9e0693d9-077a-4a02-b1c5-40fdddcdd27d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1099.850748] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Total usable vcpus: 48, total allocated vcpus: 14 {{(pid=62627) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1099.850898] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3200MB phys_disk=200GB used_disk=14GB total_vcpus=48 used_vcpus=14 pci_stats=[] {{(pid=62627) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1099.863424] env[62627]: DEBUG oslo_vmware.api [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194944, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.895834] env[62627]: INFO nova.compute.manager [-] [instance: 66882603-6ff6-49c2-9f42-3521ecaf98b2] Took 1.63 seconds to deallocate network for instance. [ 1100.073015] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfe983ab-827f-4b23-8290-d43dd15bd3fa {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.081236] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-242f9235-5255-48d5-870d-74ce4fad9736 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.089021] env[62627]: INFO nova.compute.manager [-] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Took 1.33 seconds to deallocate network for instance. 
[ 1100.115774] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-427ea4db-78a0-4948-a369-e03b74b54bc1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.124082] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02b9bc64-a309-4359-9e27-5e4639385852 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.140276] env[62627]: DEBUG nova.compute.provider_tree [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1100.165725] env[62627]: DEBUG nova.compute.manager [req-0806d8e4-5511-4764-a1ae-9d89aa9e3287 req-0f200bb2-1aff-4e11-be17-a390186230ae service nova] [instance: 66882603-6ff6-49c2-9f42-3521ecaf98b2] Received event network-vif-deleted-3dc09d47-bf2c-4769-96df-8bc30f2bb658 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1100.317633] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6c29496a-43b0-4a80-bc7e-387e9bc3e96c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Lock "457e5aa6-d229-4f48-8817-e918ec4bd0f9" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.807s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1100.368843] env[62627]: DEBUG oslo_vmware.api [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194944, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.404916] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ebcb8a7d-435d-4640-a6ff-9951422d3f58 tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1100.619703] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4dd10e3c-0fa9-4220-97c6-eab1eeee5905 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1100.643710] env[62627]: DEBUG nova.scheduler.client.report [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1100.865419] env[62627]: DEBUG oslo_vmware.api [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194944, 'name': PowerOnVM_Task, 'duration_secs': 1.026301} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.865824] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 0ee04bf9-6aa6-485c-a979-191d52af236d] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1100.865878] env[62627]: INFO nova.compute.manager [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 0ee04bf9-6aa6-485c-a979-191d52af236d] Took 8.42 seconds to spawn the instance on the hypervisor. 
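The recurring "Acquiring lock ... / acquired ... waited N.NNNs / released ... held N.NNNs" entries come from oslo.concurrency's named-lock wrappers around the resource tracker and compute manager methods (the inner function in lockutils.py referenced in each entry). The following is a rough in-process stand-in for that pattern, assuming one threading.Lock per name; the real helpers also support external file locks, which this sketch does not attempt.

import time
import threading
from contextlib import contextmanager

_locks = {}

@contextmanager
def timed_lock(name):
    # Acquire a named lock and report how long the caller waited for it
    # and how long it was held, like the log entries above.
    lock = _locks.setdefault(name, threading.Lock())
    start = time.monotonic()
    with lock:
        waited = time.monotonic() - start
        print(f'Lock "{name}" acquired :: waited {waited:.3f}s')
        acquired = time.monotonic()
        try:
            yield
        finally:
            held = time.monotonic() - acquired
            print(f'Lock "{name}" released :: held {held:.3f}s')

# usage (hypothetical): with timed_lock("compute_resources"): update_usage()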
[ 1100.866165] env[62627]: DEBUG nova.compute.manager [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 0ee04bf9-6aa6-485c-a979-191d52af236d] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1100.867073] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59b4c3a9-0610-4447-bbcc-c746fdfd9abb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.884387] env[62627]: DEBUG nova.compute.manager [req-2bf07981-8b69-40d6-9e72-214db0d1b127 req-d5fb9683-fd92-4f12-9d39-4b4d098dc011 service nova] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Received event network-vif-deleted-75c54554-1d03-46ab-bb41-d84aca2543fe {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1100.988644] env[62627]: INFO nova.compute.manager [None req-1266a09b-2c68-4200-976b-bc74329914aa tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Rescuing [ 1100.989169] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1266a09b-2c68-4200-976b-bc74329914aa tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Acquiring lock "refresh_cache-457e5aa6-d229-4f48-8817-e918ec4bd0f9" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1100.989347] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1266a09b-2c68-4200-976b-bc74329914aa tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Acquired lock "refresh_cache-457e5aa6-d229-4f48-8817-e918ec4bd0f9" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1100.989527] env[62627]: DEBUG nova.network.neutron [None req-1266a09b-2c68-4200-976b-bc74329914aa tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1101.055509] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1b565851-68bd-4b12-b588-7f1dfaba568e tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquiring lock "e9d33611-e373-4303-937d-b43d893e51f5" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1101.055906] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1b565851-68bd-4b12-b588-7f1dfaba568e tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lock "e9d33611-e373-4303-937d-b43d893e51f5" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1101.056212] env[62627]: DEBUG nova.compute.manager [None req-1b565851-68bd-4b12-b588-7f1dfaba568e tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] 
Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1101.057576] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b8e6df0-6339-4005-bc69-96377c290e59 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.067915] env[62627]: DEBUG nova.compute.manager [None req-1b565851-68bd-4b12-b588-7f1dfaba568e tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62627) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1101.068735] env[62627]: DEBUG nova.objects.instance [None req-1b565851-68bd-4b12-b588-7f1dfaba568e tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lazy-loading 'flavor' on Instance uuid e9d33611-e373-4303-937d-b43d893e51f5 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1101.149618] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62627) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1101.149946] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.845s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1101.150745] env[62627]: DEBUG oslo_concurrency.lockutils [None req-552c8601-5415-4ee1-a9a5-e69b13eacbb5 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.776s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1101.151038] env[62627]: DEBUG oslo_concurrency.lockutils [None req-552c8601-5415-4ee1-a9a5-e69b13eacbb5 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1101.154831] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.578s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1101.157097] env[62627]: INFO nova.compute.claims [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1101.181901] env[62627]: INFO nova.scheduler.client.report [None 
req-552c8601-5415-4ee1-a9a5-e69b13eacbb5 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Deleted allocations for instance e71794b9-22eb-4643-8587-f12a198024a4 [ 1101.386568] env[62627]: INFO nova.compute.manager [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 0ee04bf9-6aa6-485c-a979-191d52af236d] Took 14.62 seconds to build instance. [ 1101.691583] env[62627]: DEBUG oslo_concurrency.lockutils [None req-552c8601-5415-4ee1-a9a5-e69b13eacbb5 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "e71794b9-22eb-4643-8587-f12a198024a4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.141s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1101.875498] env[62627]: DEBUG nova.network.neutron [None req-1266a09b-2c68-4200-976b-bc74329914aa tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Updating instance_info_cache with network_info: [{"id": "62b171c6-c991-4816-86e3-ba8a0862f911", "address": "fa:16:3e:d7:1a:59", "network": {"id": "0b8ce5a6-9abc-4473-b558-a4e9e5c7d741", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1874234589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.171", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d41b3fa69e2473fbea79b0ec52108fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc9714ff-7109-4ea1-9435-b2b3fbdb9e81", "external-id": "nsx-vlan-transportzone-887", "segmentation_id": 887, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62b171c6-c9", "ovs_interfaceid": "62b171c6-c991-4816-86e3-ba8a0862f911", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1101.888642] env[62627]: DEBUG oslo_concurrency.lockutils [None req-76d6e289-0c76-42c2-a170-665d540d0b0b tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "0ee04bf9-6aa6-485c-a979-191d52af236d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.126s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1102.076974] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b565851-68bd-4b12-b588-7f1dfaba568e tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1102.077317] env[62627]: DEBUG oslo_vmware.service [-] Invoking 
VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-44ba09e6-f70a-4ca9-9236-63740e80f8c9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.087121] env[62627]: DEBUG oslo_vmware.api [None req-1b565851-68bd-4b12-b588-7f1dfaba568e tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 1102.087121] env[62627]: value = "task-2194945" [ 1102.087121] env[62627]: _type = "Task" [ 1102.087121] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.098558] env[62627]: DEBUG oslo_vmware.api [None req-1b565851-68bd-4b12-b588-7f1dfaba568e tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2194945, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.378743] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1266a09b-2c68-4200-976b-bc74329914aa tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Releasing lock "refresh_cache-457e5aa6-d229-4f48-8817-e918ec4bd0f9" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1102.459009] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5402a9c-e126-430a-8a7d-f8148912e555 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.467802] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b7bf673-83e1-4bca-8ebf-2d2221a5c6cd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.500577] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bbf218a-144f-49d7-ba98-493835ffdfde {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.508481] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45b83c4e-f722-4b3d-aa73-c7a91fc0baf4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.522044] env[62627]: DEBUG nova.compute.provider_tree [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1102.597570] env[62627]: DEBUG oslo_vmware.api [None req-1b565851-68bd-4b12-b588-7f1dfaba568e tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2194945, 'name': PowerOffVM_Task, 'duration_secs': 0.24286} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.597875] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b565851-68bd-4b12-b588-7f1dfaba568e tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1102.598094] env[62627]: DEBUG nova.compute.manager [None req-1b565851-68bd-4b12-b588-7f1dfaba568e tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1102.598903] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-614805e1-9ff7-467f-9d0c-80b675d289b6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.783253] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-365539f5-1f5b-48f0-a3ad-0404e1668c2c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.790928] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8043c7a9-b1ba-4d1f-b644-c6421baf009c tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 0ee04bf9-6aa6-485c-a979-191d52af236d] Suspending the VM {{(pid=62627) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1102.793967] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-5d52d933-636a-45de-9753-ba62bf4fde4b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.800255] env[62627]: DEBUG oslo_vmware.api [None req-8043c7a9-b1ba-4d1f-b644-c6421baf009c tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 1102.800255] env[62627]: value = "task-2194946" [ 1102.800255] env[62627]: _type = "Task" [ 1102.800255] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.813981] env[62627]: DEBUG oslo_vmware.api [None req-8043c7a9-b1ba-4d1f-b644-c6421baf009c tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194946, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.026025] env[62627]: DEBUG nova.scheduler.client.report [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1103.113767] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1b565851-68bd-4b12-b588-7f1dfaba568e tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lock "e9d33611-e373-4303-937d-b43d893e51f5" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.058s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1103.314300] env[62627]: DEBUG oslo_vmware.api [None req-8043c7a9-b1ba-4d1f-b644-c6421baf009c tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194946, 'name': SuspendVM_Task} progress is 66%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.415241] env[62627]: DEBUG oslo_concurrency.lockutils [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Acquiring lock "c0e4ea33-2397-4716-8f6a-603fe099fdc5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1103.415496] env[62627]: DEBUG oslo_concurrency.lockutils [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Lock "c0e4ea33-2397-4716-8f6a-603fe099fdc5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1103.529558] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.375s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1103.530105] env[62627]: DEBUG nova.compute.manager [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Start building networks asynchronously for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1103.532705] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0f753cbb-8047-4ac0-a4e1-0001f8b7b4ea tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.324s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1103.532929] env[62627]: DEBUG nova.objects.instance [None req-0f753cbb-8047-4ac0-a4e1-0001f8b7b4ea tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Lazy-loading 'resources' on Instance uuid 513c755a-424a-4e47-887e-7b315ad2e583 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1103.558573] env[62627]: DEBUG nova.objects.instance [None req-3b542245-d424-4b52-b8ae-70ee9b06f119 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lazy-loading 'flavor' on Instance uuid e9d33611-e373-4303-937d-b43d893e51f5 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1103.811273] env[62627]: DEBUG oslo_vmware.api [None req-8043c7a9-b1ba-4d1f-b644-c6421baf009c tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194946, 'name': SuspendVM_Task, 'duration_secs': 0.694051} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.811550] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8043c7a9-b1ba-4d1f-b644-c6421baf009c tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 0ee04bf9-6aa6-485c-a979-191d52af236d] Suspended the VM {{(pid=62627) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1103.811728] env[62627]: DEBUG nova.compute.manager [None req-8043c7a9-b1ba-4d1f-b644-c6421baf009c tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 0ee04bf9-6aa6-485c-a979-191d52af236d] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1103.812507] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26d701f9-eeca-4741-b517-5a7d800ce668 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.919307] env[62627]: DEBUG nova.compute.manager [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] [instance: c0e4ea33-2397-4716-8f6a-603fe099fdc5] Starting instance... 
{{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1103.924525] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-1266a09b-2c68-4200-976b-bc74329914aa tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1103.924796] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-65735a67-a71c-462f-bd70-26943d1b66ba {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.931857] env[62627]: DEBUG oslo_vmware.api [None req-1266a09b-2c68-4200-976b-bc74329914aa tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for the task: (returnval){ [ 1103.931857] env[62627]: value = "task-2194947" [ 1103.931857] env[62627]: _type = "Task" [ 1103.931857] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.940521] env[62627]: DEBUG oslo_vmware.api [None req-1266a09b-2c68-4200-976b-bc74329914aa tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194947, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.036608] env[62627]: DEBUG nova.compute.utils [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1104.040917] env[62627]: DEBUG nova.compute.manager [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Allocating IP information in the background. 
{{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1104.040917] env[62627]: DEBUG nova.network.neutron [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1104.066716] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3b542245-d424-4b52-b8ae-70ee9b06f119 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquiring lock "refresh_cache-e9d33611-e373-4303-937d-b43d893e51f5" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1104.067455] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3b542245-d424-4b52-b8ae-70ee9b06f119 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquired lock "refresh_cache-e9d33611-e373-4303-937d-b43d893e51f5" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1104.067455] env[62627]: DEBUG nova.network.neutron [None req-3b542245-d424-4b52-b8ae-70ee9b06f119 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1104.067455] env[62627]: DEBUG nova.objects.instance [None req-3b542245-d424-4b52-b8ae-70ee9b06f119 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lazy-loading 'info_cache' on Instance uuid e9d33611-e373-4303-937d-b43d893e51f5 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1104.097425] env[62627]: DEBUG nova.policy [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4289413aec40498dab5f6170d69eb39a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '74aed70194cf444884768a5c89fe8613', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 1104.218774] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquiring lock "f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1104.219039] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1104.291893] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfdb040c-5778-4423-97d2-1faf49c8add0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.299764] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8e05b84-adb6-4650-878e-1d697006ae88 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.335693] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c225f911-8c1e-469a-bcee-45ca47411e9a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.347414] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4d848fc-7a0a-4524-b73a-1f35984bc515 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.364917] env[62627]: DEBUG nova.compute.provider_tree [None req-0f753cbb-8047-4ac0-a4e1-0001f8b7b4ea tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1104.439420] env[62627]: DEBUG nova.network.neutron [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Successfully created port: 880f6f52-e5e1-4cef-ac19-65931c8f46a6 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1104.442021] env[62627]: DEBUG oslo_concurrency.lockutils [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1104.446069] env[62627]: DEBUG oslo_vmware.api [None req-1266a09b-2c68-4200-976b-bc74329914aa tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194947, 'name': PowerOffVM_Task, 'duration_secs': 0.192249} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.446069] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-1266a09b-2c68-4200-976b-bc74329914aa tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1104.446720] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82c9c904-2ddb-40e8-bb71-23bc8c34eb49 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.466736] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5117e83b-63dc-4657-a147-66c79eb53e2c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.497872] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-1266a09b-2c68-4200-976b-bc74329914aa tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1104.498162] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d370232d-0244-4abb-93c1-f63001c0fa57 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.504029] env[62627]: DEBUG oslo_vmware.api [None req-1266a09b-2c68-4200-976b-bc74329914aa tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for the task: (returnval){ [ 1104.504029] env[62627]: value = "task-2194948" [ 1104.504029] env[62627]: _type = "Task" [ 1104.504029] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.511703] env[62627]: DEBUG oslo_vmware.api [None req-1266a09b-2c68-4200-976b-bc74329914aa tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194948, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.541473] env[62627]: DEBUG nova.compute.manager [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1104.572074] env[62627]: DEBUG nova.objects.base [None req-3b542245-d424-4b52-b8ae-70ee9b06f119 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=62627) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1104.721982] env[62627]: DEBUG nova.compute.manager [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Starting instance... 
{{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1104.870594] env[62627]: DEBUG nova.scheduler.client.report [None req-0f753cbb-8047-4ac0-a4e1-0001f8b7b4ea tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1105.015069] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-1266a09b-2c68-4200-976b-bc74329914aa tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] VM already powered off {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1105.015298] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-1266a09b-2c68-4200-976b-bc74329914aa tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1105.015550] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1266a09b-2c68-4200-976b-bc74329914aa tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1105.015706] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1266a09b-2c68-4200-976b-bc74329914aa tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1105.015881] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-1266a09b-2c68-4200-976b-bc74329914aa tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1105.016538] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fc3975c0-38d9-40ef-bd60-2e7305dc92bc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.027753] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-1266a09b-2c68-4200-976b-bc74329914aa tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1105.027941] 
env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-1266a09b-2c68-4200-976b-bc74329914aa tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1105.028764] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-633bf9f5-8ebb-42af-b49e-6d7acc9bfdb4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.033631] env[62627]: DEBUG oslo_vmware.api [None req-1266a09b-2c68-4200-976b-bc74329914aa tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for the task: (returnval){ [ 1105.033631] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]521524ba-0129-95d0-df77-112c0b257e8b" [ 1105.033631] env[62627]: _type = "Task" [ 1105.033631] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.041016] env[62627]: DEBUG oslo_vmware.api [None req-1266a09b-2c68-4200-976b-bc74329914aa tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]521524ba-0129-95d0-df77-112c0b257e8b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.243506] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1105.261281] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c389e02a-f21d-4da9-ad55-b2a342771dcd tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquiring lock "0ee04bf9-6aa6-485c-a979-191d52af236d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1105.261403] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c389e02a-f21d-4da9-ad55-b2a342771dcd tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "0ee04bf9-6aa6-485c-a979-191d52af236d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1105.261600] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c389e02a-f21d-4da9-ad55-b2a342771dcd tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquiring lock "0ee04bf9-6aa6-485c-a979-191d52af236d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1105.261780] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c389e02a-f21d-4da9-ad55-b2a342771dcd tempest-DeleteServersTestJSON-1791757005 
tempest-DeleteServersTestJSON-1791757005-project-member] Lock "0ee04bf9-6aa6-485c-a979-191d52af236d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1105.261948] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c389e02a-f21d-4da9-ad55-b2a342771dcd tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "0ee04bf9-6aa6-485c-a979-191d52af236d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1105.263858] env[62627]: INFO nova.compute.manager [None req-c389e02a-f21d-4da9-ad55-b2a342771dcd tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 0ee04bf9-6aa6-485c-a979-191d52af236d] Terminating instance [ 1105.282365] env[62627]: DEBUG nova.network.neutron [None req-3b542245-d424-4b52-b8ae-70ee9b06f119 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Updating instance_info_cache with network_info: [{"id": "5825a537-f659-4e7e-907e-e22208e06c0c", "address": "fa:16:3e:a9:0b:9d", "network": {"id": "da26ed97-a6a7-47f1-b6c0-d1b2f45ad202", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-265963183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.167", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97c11d791dcf4f94a8e8bda7a6f014cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6eaa481-1f92-4851-b98e-09ed0daad7cb", "external-id": "nsx-vlan-transportzone-636", "segmentation_id": 636, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5825a537-f6", "ovs_interfaceid": "5825a537-f659-4e7e-907e-e22208e06c0c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1105.375463] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0f753cbb-8047-4ac0-a4e1-0001f8b7b4ea tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.842s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1105.378110] env[62627]: DEBUG oslo_concurrency.lockutils [None req-dcf62b44-4998-476e-a5df-d8d19a25c78f tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.875s {{(pid=62627) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1105.378110] env[62627]: DEBUG nova.objects.instance [None req-dcf62b44-4998-476e-a5df-d8d19a25c78f tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Lazy-loading 'resources' on Instance uuid e5a1fa80-6579-4334-9705-33a95dbc3797 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1105.393799] env[62627]: INFO nova.scheduler.client.report [None req-0f753cbb-8047-4ac0-a4e1-0001f8b7b4ea tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Deleted allocations for instance 513c755a-424a-4e47-887e-7b315ad2e583 [ 1105.543507] env[62627]: DEBUG oslo_vmware.api [None req-1266a09b-2c68-4200-976b-bc74329914aa tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]521524ba-0129-95d0-df77-112c0b257e8b, 'name': SearchDatastore_Task, 'duration_secs': 0.029852} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.544280] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-74ccc786-8c1a-4e18-9bba-00b6331f75c1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.550249] env[62627]: DEBUG nova.compute.manager [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Start spawning the instance on the hypervisor. {{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1105.552097] env[62627]: DEBUG oslo_vmware.api [None req-1266a09b-2c68-4200-976b-bc74329914aa tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for the task: (returnval){ [ 1105.552097] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5226b253-db03-39b2-11c8-63825e6fd794" [ 1105.552097] env[62627]: _type = "Task" [ 1105.552097] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.560397] env[62627]: DEBUG oslo_vmware.api [None req-1266a09b-2c68-4200-976b-bc74329914aa tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5226b253-db03-39b2-11c8-63825e6fd794, 'name': SearchDatastore_Task, 'duration_secs': 0.008341} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.560704] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1266a09b-2c68-4200-976b-bc74329914aa tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1105.560899] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-1266a09b-2c68-4200-976b-bc74329914aa tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 457e5aa6-d229-4f48-8817-e918ec4bd0f9/d5497cf1-16e8-478a-a834-e9289b1751b7-rescue.vmdk. {{(pid=62627) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1105.561148] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-49fdeca9-33e3-41ac-9f5d-ae9ed48c02f7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.567576] env[62627]: DEBUG oslo_vmware.api [None req-1266a09b-2c68-4200-976b-bc74329914aa tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for the task: (returnval){ [ 1105.567576] env[62627]: value = "task-2194949" [ 1105.567576] env[62627]: _type = "Task" [ 1105.567576] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.574988] env[62627]: DEBUG nova.virt.hardware [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1105.575202] env[62627]: DEBUG nova.virt.hardware [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1105.575362] env[62627]: DEBUG nova.virt.hardware [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:356}} [ 1105.575545] env[62627]: DEBUG nova.virt.hardware [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1105.575709] env[62627]: DEBUG nova.virt.hardware [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1105.575859] env[62627]: DEBUG nova.virt.hardware [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1105.576074] env[62627]: DEBUG nova.virt.hardware [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1105.576237] env[62627]: DEBUG nova.virt.hardware [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1105.576436] env[62627]: DEBUG nova.virt.hardware [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1105.576590] env[62627]: DEBUG nova.virt.hardware [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1105.576766] env[62627]: DEBUG nova.virt.hardware [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1105.577525] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-853b1edb-205f-49dc-bda0-e81d920126b6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.582519] env[62627]: DEBUG oslo_vmware.api [None req-1266a09b-2c68-4200-976b-bc74329914aa tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194949, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.587061] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9fc1c7c-6dcb-4e6f-a7c9-1fb66859d116 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.768013] env[62627]: DEBUG nova.compute.manager [None req-c389e02a-f21d-4da9-ad55-b2a342771dcd tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 0ee04bf9-6aa6-485c-a979-191d52af236d] Start destroying the instance on the hypervisor. {{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1105.768322] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c389e02a-f21d-4da9-ad55-b2a342771dcd tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 0ee04bf9-6aa6-485c-a979-191d52af236d] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1105.769190] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec589285-c240-468d-8bc4-45f950461b61 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.776669] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c389e02a-f21d-4da9-ad55-b2a342771dcd tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 0ee04bf9-6aa6-485c-a979-191d52af236d] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1105.776935] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cfb17c52-b29a-4f8e-a8f7-31f424943738 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.785534] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3b542245-d424-4b52-b8ae-70ee9b06f119 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Releasing lock "refresh_cache-e9d33611-e373-4303-937d-b43d893e51f5" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1105.863071] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c389e02a-f21d-4da9-ad55-b2a342771dcd tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 0ee04bf9-6aa6-485c-a979-191d52af236d] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1105.866447] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c389e02a-f21d-4da9-ad55-b2a342771dcd tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 0ee04bf9-6aa6-485c-a979-191d52af236d] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1105.866447] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-c389e02a-f21d-4da9-ad55-b2a342771dcd tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Deleting the datastore file [datastore2] 0ee04bf9-6aa6-485c-a979-191d52af236d {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1105.866447] env[62627]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9074a23b-c467-41fd-938f-39c2f0c44fbe {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.873106] env[62627]: DEBUG oslo_vmware.api [None req-c389e02a-f21d-4da9-ad55-b2a342771dcd tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 1105.873106] env[62627]: value = "task-2194951" [ 1105.873106] env[62627]: _type = "Task" [ 1105.873106] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.883886] env[62627]: DEBUG oslo_vmware.api [None req-c389e02a-f21d-4da9-ad55-b2a342771dcd tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194951, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.901977] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0f753cbb-8047-4ac0-a4e1-0001f8b7b4ea tempest-ServersTestFqdnHostnames-27247144 tempest-ServersTestFqdnHostnames-27247144-project-member] Lock "513c755a-424a-4e47-887e-7b315ad2e583" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.431s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1105.942025] env[62627]: DEBUG nova.compute.manager [req-57fd9564-8dcd-470b-a6b8-1c5a43f9392f req-3065adb3-50d8-46bd-80da-246a835e5454 service nova] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Received event network-vif-plugged-880f6f52-e5e1-4cef-ac19-65931c8f46a6 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1105.942334] env[62627]: DEBUG oslo_concurrency.lockutils [req-57fd9564-8dcd-470b-a6b8-1c5a43f9392f req-3065adb3-50d8-46bd-80da-246a835e5454 service nova] Acquiring lock "9e0693d9-077a-4a02-b1c5-40fdddcdd27d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1105.942556] env[62627]: DEBUG oslo_concurrency.lockutils [req-57fd9564-8dcd-470b-a6b8-1c5a43f9392f req-3065adb3-50d8-46bd-80da-246a835e5454 service nova] Lock "9e0693d9-077a-4a02-b1c5-40fdddcdd27d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1105.942729] env[62627]: DEBUG oslo_concurrency.lockutils [req-57fd9564-8dcd-470b-a6b8-1c5a43f9392f req-3065adb3-50d8-46bd-80da-246a835e5454 service nova] Lock "9e0693d9-077a-4a02-b1c5-40fdddcdd27d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1105.942894] env[62627]: DEBUG nova.compute.manager [req-57fd9564-8dcd-470b-a6b8-1c5a43f9392f req-3065adb3-50d8-46bd-80da-246a835e5454 service nova] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] No waiting events found dispatching network-vif-plugged-880f6f52-e5e1-4cef-ac19-65931c8f46a6 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1105.943634] env[62627]: WARNING nova.compute.manager 
[req-57fd9564-8dcd-470b-a6b8-1c5a43f9392f req-3065adb3-50d8-46bd-80da-246a835e5454 service nova] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Received unexpected event network-vif-plugged-880f6f52-e5e1-4cef-ac19-65931c8f46a6 for instance with vm_state building and task_state spawning. [ 1106.069611] env[62627]: DEBUG nova.network.neutron [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Successfully updated port: 880f6f52-e5e1-4cef-ac19-65931c8f46a6 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1106.081103] env[62627]: DEBUG oslo_vmware.api [None req-1266a09b-2c68-4200-976b-bc74329914aa tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194949, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.441798} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.081939] env[62627]: INFO nova.virt.vmwareapi.ds_util [None req-1266a09b-2c68-4200-976b-bc74329914aa tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 457e5aa6-d229-4f48-8817-e918ec4bd0f9/d5497cf1-16e8-478a-a834-e9289b1751b7-rescue.vmdk. [ 1106.082806] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3b182de-3d5b-4f79-962d-bcd1b1edf616 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.114634] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-1266a09b-2c68-4200-976b-bc74329914aa tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Reconfiguring VM instance instance-00000033 to attach disk [datastore2] 457e5aa6-d229-4f48-8817-e918ec4bd0f9/d5497cf1-16e8-478a-a834-e9289b1751b7-rescue.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1106.117790] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a4189aa1-625d-486e-8a25-8949c1af6143 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.135919] env[62627]: DEBUG oslo_vmware.api [None req-1266a09b-2c68-4200-976b-bc74329914aa tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for the task: (returnval){ [ 1106.135919] env[62627]: value = "task-2194952" [ 1106.135919] env[62627]: _type = "Task" [ 1106.135919] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.146276] env[62627]: DEBUG oslo_vmware.api [None req-1266a09b-2c68-4200-976b-bc74329914aa tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194952, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.172073] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e17bf67-a44a-4706-9ade-b3bbf1a31682 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.179281] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f04b52f8-fc1c-44f2-8d26-acdf9fc32b30 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.211677] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47353983-3eb7-4be5-a24c-e5390af20c25 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.219480] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-facd8b6f-9a80-45f7-93a8-6c78c0dba6ea {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.234169] env[62627]: DEBUG nova.compute.provider_tree [None req-dcf62b44-4998-476e-a5df-d8d19a25c78f tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1106.383452] env[62627]: DEBUG oslo_vmware.api [None req-c389e02a-f21d-4da9-ad55-b2a342771dcd tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2194951, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.249165} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.383710] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-c389e02a-f21d-4da9-ad55-b2a342771dcd tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1106.383893] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c389e02a-f21d-4da9-ad55-b2a342771dcd tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 0ee04bf9-6aa6-485c-a979-191d52af236d] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1106.384085] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c389e02a-f21d-4da9-ad55-b2a342771dcd tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 0ee04bf9-6aa6-485c-a979-191d52af236d] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1106.384265] env[62627]: INFO nova.compute.manager [None req-c389e02a-f21d-4da9-ad55-b2a342771dcd tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: 0ee04bf9-6aa6-485c-a979-191d52af236d] Took 0.62 seconds to destroy the instance on the hypervisor. 
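The records above repeat one pattern: a vCenter method such as CopyVirtualDisk_Task, UnregisterVM, DeleteDatastoreFile_Task or ReconfigVM_Task is invoked through oslo.vmware, a Task reference comes back, and the driver polls it until completion (the "Waiting for the task ... to complete", "progress is N%" and "completed successfully" lines from api.py:397/434/444). The following is a minimal sketch of that invoke-and-wait pattern, not the exact Nova code; the credentials, datastore paths and the use of vim.service_content.virtualDiskManager are illustrative assumptions.

    # Minimal sketch of the invoke-and-poll pattern seen in the records above.
    # Assumes a reachable vCenter; host, credentials and paths are placeholders.
    from oslo_vmware import api

    session = api.VMwareAPISession(
        'vc.example.org', 'user', 'password',          # hypothetical endpoint
        api_retry_count=10, task_poll_interval=0.5)

    # Invoking a *_Task method returns a Task managed object reference.
    copy_task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task',
        session.vim.service_content.virtualDiskManager,
        sourceName='[datastore2] devstack-image-cache_base/<image>/<image>.vmdk',
        destName='[datastore2] <instance-uuid>/<image>-rescue.vmdk')

    # Block until the task reaches a terminal state, polling its progress in
    # between; this polling is what emits the "progress is N%" and
    # "completed successfully" DEBUG lines in the log.
    session.wait_for_task(copy_task)
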
[ 1106.384512] env[62627]: DEBUG oslo.service.loopingcall [None req-c389e02a-f21d-4da9-ad55-b2a342771dcd tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1106.384744] env[62627]: DEBUG nova.compute.manager [-] [instance: 0ee04bf9-6aa6-485c-a979-191d52af236d] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1106.384908] env[62627]: DEBUG nova.network.neutron [-] [instance: 0ee04bf9-6aa6-485c-a979-191d52af236d] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1106.573911] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquiring lock "refresh_cache-9e0693d9-077a-4a02-b1c5-40fdddcdd27d" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1106.573911] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquired lock "refresh_cache-9e0693d9-077a-4a02-b1c5-40fdddcdd27d" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1106.573911] env[62627]: DEBUG nova.network.neutron [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1106.645442] env[62627]: DEBUG oslo_vmware.api [None req-1266a09b-2c68-4200-976b-bc74329914aa tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194952, 'name': ReconfigVM_Task, 'duration_secs': 0.298796} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.645768] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-1266a09b-2c68-4200-976b-bc74329914aa tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Reconfigured VM instance instance-00000033 to attach disk [datastore2] 457e5aa6-d229-4f48-8817-e918ec4bd0f9/d5497cf1-16e8-478a-a834-e9289b1751b7-rescue.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1106.646581] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18449983-a99a-4113-902f-098fa63f05db {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.675961] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-20f14829-ef2a-4abf-af16-81bf822be4cd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.692292] env[62627]: DEBUG oslo_vmware.api [None req-1266a09b-2c68-4200-976b-bc74329914aa tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for the task: (returnval){ [ 1106.692292] env[62627]: value = "task-2194953" [ 1106.692292] env[62627]: _type = "Task" [ 1106.692292] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.699842] env[62627]: DEBUG oslo_vmware.api [None req-1266a09b-2c68-4200-976b-bc74329914aa tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194953, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.737948] env[62627]: DEBUG nova.scheduler.client.report [None req-dcf62b44-4998-476e-a5df-d8d19a25c78f tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1106.791528] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b542245-d424-4b52-b8ae-70ee9b06f119 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1106.792183] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9dcae38d-57b7-4773-b7f9-6b7e501c43fe {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.800129] env[62627]: DEBUG oslo_vmware.api [None req-3b542245-d424-4b52-b8ae-70ee9b06f119 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 1106.800129] env[62627]: value = "task-2194954" [ 1106.800129] env[62627]: _type = "Task" [ 1106.800129] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.811212] env[62627]: DEBUG oslo_vmware.api [None req-3b542245-d424-4b52-b8ae-70ee9b06f119 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2194954, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.120823] env[62627]: DEBUG nova.network.neutron [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1107.136997] env[62627]: DEBUG nova.network.neutron [-] [instance: 0ee04bf9-6aa6-485c-a979-191d52af236d] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1107.206824] env[62627]: DEBUG oslo_vmware.api [None req-1266a09b-2c68-4200-976b-bc74329914aa tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194953, 'name': ReconfigVM_Task, 'duration_secs': 0.176826} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.207173] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-1266a09b-2c68-4200-976b-bc74329914aa tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1107.207453] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-855c7f76-2da0-4f03-b230-bdee3a01e5ad {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.213900] env[62627]: DEBUG oslo_vmware.api [None req-1266a09b-2c68-4200-976b-bc74329914aa tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for the task: (returnval){ [ 1107.213900] env[62627]: value = "task-2194955" [ 1107.213900] env[62627]: _type = "Task" [ 1107.213900] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.222907] env[62627]: DEBUG oslo_vmware.api [None req-1266a09b-2c68-4200-976b-bc74329914aa tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194955, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.245137] env[62627]: DEBUG oslo_concurrency.lockutils [None req-dcf62b44-4998-476e-a5df-d8d19a25c78f tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.867s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1107.247700] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ebcb8a7d-435d-4640-a6ff-9951422d3f58 tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.843s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1107.247807] env[62627]: DEBUG nova.objects.instance [None req-ebcb8a7d-435d-4640-a6ff-9951422d3f58 tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Lazy-loading 'resources' on Instance uuid 66882603-6ff6-49c2-9f42-3521ecaf98b2 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1107.271642] env[62627]: INFO nova.scheduler.client.report [None req-dcf62b44-4998-476e-a5df-d8d19a25c78f tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Deleted allocations for instance e5a1fa80-6579-4334-9705-33a95dbc3797 [ 1107.311882] env[62627]: DEBUG oslo_vmware.api [None req-3b542245-d424-4b52-b8ae-70ee9b06f119 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2194954, 'name': PowerOnVM_Task, 'duration_secs': 0.418426} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.312180] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b542245-d424-4b52-b8ae-70ee9b06f119 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1107.312384] env[62627]: DEBUG nova.compute.manager [None req-3b542245-d424-4b52-b8ae-70ee9b06f119 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1107.313347] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c184eb3f-fc0c-4c71-8c2b-a562d19399f1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.383812] env[62627]: DEBUG nova.network.neutron [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Updating instance_info_cache with network_info: [{"id": "880f6f52-e5e1-4cef-ac19-65931c8f46a6", "address": "fa:16:3e:12:73:4e", "network": {"id": "f5de376b-cb15-4145-90b2-664ca4fa638e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1451402594-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74aed70194cf444884768a5c89fe8613", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "098df9b7-d759-47f7-b756-334848cb423b", "external-id": "nsx-vlan-transportzone-765", "segmentation_id": 765, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap880f6f52-e5", "ovs_interfaceid": "880f6f52-e5e1-4cef-ac19-65931c8f46a6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1107.640245] env[62627]: INFO nova.compute.manager [-] [instance: 0ee04bf9-6aa6-485c-a979-191d52af236d] Took 1.25 seconds to deallocate network for instance. [ 1107.724844] env[62627]: DEBUG oslo_vmware.api [None req-1266a09b-2c68-4200-976b-bc74329914aa tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194955, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.785220] env[62627]: DEBUG oslo_concurrency.lockutils [None req-dcf62b44-4998-476e-a5df-d8d19a25c78f tempest-ListServerFiltersTestJSON-1687006819 tempest-ListServerFiltersTestJSON-1687006819-project-member] Lock "e5a1fa80-6579-4334-9705-33a95dbc3797" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.825s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1107.887277] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Releasing lock "refresh_cache-9e0693d9-077a-4a02-b1c5-40fdddcdd27d" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1107.887372] env[62627]: DEBUG nova.compute.manager [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Instance network_info: |[{"id": "880f6f52-e5e1-4cef-ac19-65931c8f46a6", "address": "fa:16:3e:12:73:4e", "network": {"id": "f5de376b-cb15-4145-90b2-664ca4fa638e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1451402594-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74aed70194cf444884768a5c89fe8613", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "098df9b7-d759-47f7-b756-334848cb423b", "external-id": "nsx-vlan-transportzone-765", "segmentation_id": 765, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap880f6f52-e5", "ovs_interfaceid": "880f6f52-e5e1-4cef-ac19-65931c8f46a6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1107.887712] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:12:73:4e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '098df9b7-d759-47f7-b756-334848cb423b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '880f6f52-e5e1-4cef-ac19-65931c8f46a6', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1107.896088] env[62627]: DEBUG oslo.service.loopingcall [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1107.896339] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1107.896585] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f08eda55-3539-47e0-b3e5-59e7c4eb72d2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.929645] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1107.929645] env[62627]: value = "task-2194956" [ 1107.929645] env[62627]: _type = "Task" [ 1107.929645] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.938806] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194956, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.000033] env[62627]: DEBUG nova.compute.manager [req-d78e42bb-59ca-417d-8ae4-df921af23f79 req-dd7e7a67-9334-4dac-9bc2-49a1a31cfe0c service nova] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Received event network-changed-880f6f52-e5e1-4cef-ac19-65931c8f46a6 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1108.000033] env[62627]: DEBUG nova.compute.manager [req-d78e42bb-59ca-417d-8ae4-df921af23f79 req-dd7e7a67-9334-4dac-9bc2-49a1a31cfe0c service nova] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Refreshing instance network info cache due to event network-changed-880f6f52-e5e1-4cef-ac19-65931c8f46a6. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1108.000033] env[62627]: DEBUG oslo_concurrency.lockutils [req-d78e42bb-59ca-417d-8ae4-df921af23f79 req-dd7e7a67-9334-4dac-9bc2-49a1a31cfe0c service nova] Acquiring lock "refresh_cache-9e0693d9-077a-4a02-b1c5-40fdddcdd27d" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1108.000216] env[62627]: DEBUG oslo_concurrency.lockutils [req-d78e42bb-59ca-417d-8ae4-df921af23f79 req-dd7e7a67-9334-4dac-9bc2-49a1a31cfe0c service nova] Acquired lock "refresh_cache-9e0693d9-077a-4a02-b1c5-40fdddcdd27d" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1108.000577] env[62627]: DEBUG nova.network.neutron [req-d78e42bb-59ca-417d-8ae4-df921af23f79 req-dd7e7a67-9334-4dac-9bc2-49a1a31cfe0c service nova] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Refreshing network info cache for port 880f6f52-e5e1-4cef-ac19-65931c8f46a6 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1108.065129] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-374ca7a2-8e7c-4fb0-8e52-c0a5aafb3929 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.074763] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d67bbd4f-1753-4e21-8cec-b9e5d15de0cb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.110838] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7be600e6-e01b-4096-b2b3-f7d88a1dbfad {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.121397] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ef162d9-cb48-4fb5-89f0-0cbdb1c7e6b0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.137009] env[62627]: DEBUG nova.compute.provider_tree [None req-ebcb8a7d-435d-4640-a6ff-9951422d3f58 tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1108.149890] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c389e02a-f21d-4da9-ad55-b2a342771dcd tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1108.227223] env[62627]: DEBUG oslo_vmware.api [None req-1266a09b-2c68-4200-976b-bc74329914aa tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194955, 'name': PowerOnVM_Task, 'duration_secs': 0.710431} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.227631] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-1266a09b-2c68-4200-976b-bc74329914aa tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1108.230583] env[62627]: DEBUG nova.compute.manager [None req-1266a09b-2c68-4200-976b-bc74329914aa tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1108.231404] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36313c87-c9de-4fc9-9886-3179e9e9157b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.439459] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194956, 'name': CreateVM_Task, 'duration_secs': 0.310781} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.439598] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1108.440530] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1108.440530] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1108.441090] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1108.441178] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-079181b9-ee00-4119-9be7-62157abd58f8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.445891] env[62627]: DEBUG oslo_vmware.api [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1108.445891] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52697826-d0b3-de4d-1f5b-73aac7f4502b" [ 1108.445891] env[62627]: _type = "Task" [ 1108.445891] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.452965] env[62627]: DEBUG oslo_vmware.api [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52697826-d0b3-de4d-1f5b-73aac7f4502b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.640413] env[62627]: DEBUG nova.scheduler.client.report [None req-ebcb8a7d-435d-4640-a6ff-9951422d3f58 tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1108.713396] env[62627]: DEBUG nova.network.neutron [req-d78e42bb-59ca-417d-8ae4-df921af23f79 req-dd7e7a67-9334-4dac-9bc2-49a1a31cfe0c service nova] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Updated VIF entry in instance network info cache for port 880f6f52-e5e1-4cef-ac19-65931c8f46a6. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1108.713899] env[62627]: DEBUG nova.network.neutron [req-d78e42bb-59ca-417d-8ae4-df921af23f79 req-dd7e7a67-9334-4dac-9bc2-49a1a31cfe0c service nova] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Updating instance_info_cache with network_info: [{"id": "880f6f52-e5e1-4cef-ac19-65931c8f46a6", "address": "fa:16:3e:12:73:4e", "network": {"id": "f5de376b-cb15-4145-90b2-664ca4fa638e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1451402594-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74aed70194cf444884768a5c89fe8613", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "098df9b7-d759-47f7-b756-334848cb423b", "external-id": "nsx-vlan-transportzone-765", "segmentation_id": 765, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap880f6f52-e5", "ovs_interfaceid": "880f6f52-e5e1-4cef-ac19-65931c8f46a6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1108.958119] env[62627]: DEBUG oslo_vmware.api [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52697826-d0b3-de4d-1f5b-73aac7f4502b, 'name': SearchDatastore_Task, 'duration_secs': 
0.026634} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.958598] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1108.959068] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1108.959458] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1108.959744] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1108.960149] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1108.960695] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6a27694c-99ee-4d1d-b166-5d249be24f83 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.970804] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1108.973116] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1108.974799] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4979508b-07cd-4e37-b752-42faaab33230 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.982210] env[62627]: DEBUG oslo_vmware.api [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1108.982210] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52ab3dff-be10-27a2-0357-ff68b6d5fef9" [ 1108.982210] env[62627]: _type = "Task" [ 1108.982210] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.992146] env[62627]: DEBUG oslo_vmware.api [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52ab3dff-be10-27a2-0357-ff68b6d5fef9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.151573] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ebcb8a7d-435d-4640-a6ff-9951422d3f58 tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.903s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1109.157580] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4dd10e3c-0fa9-4220-97c6-eab1eeee5905 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.538s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1109.159130] env[62627]: DEBUG nova.objects.instance [None req-4dd10e3c-0fa9-4220-97c6-eab1eeee5905 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lazy-loading 'resources' on Instance uuid 78307469-ebbe-45bc-a146-eaa298e9b707 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1109.183246] env[62627]: INFO nova.scheduler.client.report [None req-ebcb8a7d-435d-4640-a6ff-9951422d3f58 tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Deleted allocations for instance 66882603-6ff6-49c2-9f42-3521ecaf98b2 [ 1109.218191] env[62627]: DEBUG oslo_concurrency.lockutils [req-d78e42bb-59ca-417d-8ae4-df921af23f79 req-dd7e7a67-9334-4dac-9bc2-49a1a31cfe0c service nova] Releasing lock "refresh_cache-9e0693d9-077a-4a02-b1c5-40fdddcdd27d" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1109.218463] env[62627]: DEBUG nova.compute.manager [req-d78e42bb-59ca-417d-8ae4-df921af23f79 req-dd7e7a67-9334-4dac-9bc2-49a1a31cfe0c service nova] [instance: 0ee04bf9-6aa6-485c-a979-191d52af236d] Received event network-vif-deleted-83a5bd04-2ff4-4441-aa87-768435659d4d {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1109.495022] 
env[62627]: DEBUG oslo_vmware.api [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52ab3dff-be10-27a2-0357-ff68b6d5fef9, 'name': SearchDatastore_Task, 'duration_secs': 0.009626} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.496839] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4b710c75-9757-48e7-8f7b-1f43d9c46a6a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.502646] env[62627]: DEBUG oslo_vmware.api [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1109.502646] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52688e69-25c9-2bea-5b9c-dd5c5214bce4" [ 1109.502646] env[62627]: _type = "Task" [ 1109.502646] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.510545] env[62627]: DEBUG oslo_vmware.api [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52688e69-25c9-2bea-5b9c-dd5c5214bce4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.646362] env[62627]: INFO nova.compute.manager [None req-0037d497-d452-43e1-9543-c05ac87f769e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Unrescuing [ 1109.646780] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0037d497-d452-43e1-9543-c05ac87f769e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Acquiring lock "refresh_cache-457e5aa6-d229-4f48-8817-e918ec4bd0f9" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1109.646826] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0037d497-d452-43e1-9543-c05ac87f769e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Acquired lock "refresh_cache-457e5aa6-d229-4f48-8817-e918ec4bd0f9" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1109.646979] env[62627]: DEBUG nova.network.neutron [None req-0037d497-d452-43e1-9543-c05ac87f769e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1109.699466] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ebcb8a7d-435d-4640-a6ff-9951422d3f58 tempest-ServersTestManualDisk-1682543278 tempest-ServersTestManualDisk-1682543278-project-member] Lock "66882603-6ff6-49c2-9f42-3521ecaf98b2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.104s {{(pid=62627) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1109.853178] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27e417f4-9f87-45af-9094-7c2687eec109 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.861132] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a408ecd-3b38-45b1-802c-1f9392ebbd6b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.891767] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff57239f-5490-47dc-a691-7aa57aa5e77e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.898961] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e312d6d-3456-4cb4-934d-264910a8da36 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.911961] env[62627]: DEBUG nova.compute.provider_tree [None req-4dd10e3c-0fa9-4220-97c6-eab1eeee5905 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1110.012410] env[62627]: DEBUG oslo_vmware.api [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52688e69-25c9-2bea-5b9c-dd5c5214bce4, 'name': SearchDatastore_Task, 'duration_secs': 0.021156} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.012681] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1110.012937] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 9e0693d9-077a-4a02-b1c5-40fdddcdd27d/9e0693d9-077a-4a02-b1c5-40fdddcdd27d.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1110.013210] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d9819aea-31ac-4944-9944-9aa4a4596248 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.020784] env[62627]: DEBUG oslo_vmware.api [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1110.020784] env[62627]: value = "task-2194957" [ 1110.020784] env[62627]: _type = "Task" [ 1110.020784] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.028719] env[62627]: DEBUG oslo_vmware.api [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2194957, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.415287] env[62627]: DEBUG nova.scheduler.client.report [None req-4dd10e3c-0fa9-4220-97c6-eab1eeee5905 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1110.532581] env[62627]: DEBUG oslo_vmware.api [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2194957, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.746790] env[62627]: DEBUG nova.network.neutron [None req-0037d497-d452-43e1-9543-c05ac87f769e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Updating instance_info_cache with network_info: [{"id": "62b171c6-c991-4816-86e3-ba8a0862f911", "address": "fa:16:3e:d7:1a:59", "network": {"id": "0b8ce5a6-9abc-4473-b558-a4e9e5c7d741", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1874234589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.171", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d41b3fa69e2473fbea79b0ec52108fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc9714ff-7109-4ea1-9435-b2b3fbdb9e81", "external-id": "nsx-vlan-transportzone-887", "segmentation_id": 887, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62b171c6-c9", "ovs_interfaceid": "62b171c6-c991-4816-86e3-ba8a0862f911", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1110.921874] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4dd10e3c-0fa9-4220-97c6-eab1eeee5905 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.764s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1110.924411] env[62627]: DEBUG oslo_concurrency.lockutils [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.482s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1110.926480] env[62627]: INFO nova.compute.claims [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] [instance: c0e4ea33-2397-4716-8f6a-603fe099fdc5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1110.955783] env[62627]: INFO nova.scheduler.client.report [None req-4dd10e3c-0fa9-4220-97c6-eab1eeee5905 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Deleted allocations for instance 78307469-ebbe-45bc-a146-eaa298e9b707 [ 1111.034521] env[62627]: DEBUG oslo_vmware.api [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2194957, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.592165} completed 
successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.034692] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 9e0693d9-077a-4a02-b1c5-40fdddcdd27d/9e0693d9-077a-4a02-b1c5-40fdddcdd27d.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1111.034985] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1111.036302] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d02efe1d-5312-4b07-98bf-62e7586a42b8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.043442] env[62627]: DEBUG oslo_vmware.api [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1111.043442] env[62627]: value = "task-2194958" [ 1111.043442] env[62627]: _type = "Task" [ 1111.043442] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.054363] env[62627]: DEBUG oslo_vmware.api [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2194958, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.252330] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0037d497-d452-43e1-9543-c05ac87f769e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Releasing lock "refresh_cache-457e5aa6-d229-4f48-8817-e918ec4bd0f9" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1111.253078] env[62627]: DEBUG nova.objects.instance [None req-0037d497-d452-43e1-9543-c05ac87f769e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Lazy-loading 'flavor' on Instance uuid 457e5aa6-d229-4f48-8817-e918ec4bd0f9 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1111.469354] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4dd10e3c-0fa9-4220-97c6-eab1eeee5905 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "78307469-ebbe-45bc-a146-eaa298e9b707" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.359s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1111.559437] env[62627]: DEBUG oslo_vmware.api [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2194958, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.761238] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c67e95e1-a1cf-4874-82ec-adaa7f2468b4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.788876] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-0037d497-d452-43e1-9543-c05ac87f769e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1111.789785] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-96b3cce7-05aa-4121-819d-8437a0a0296b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.796858] env[62627]: DEBUG oslo_vmware.api [None req-0037d497-d452-43e1-9543-c05ac87f769e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for the task: (returnval){ [ 1111.796858] env[62627]: value = "task-2194959" [ 1111.796858] env[62627]: _type = "Task" [ 1111.796858] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.807461] env[62627]: DEBUG oslo_vmware.api [None req-0037d497-d452-43e1-9543-c05ac87f769e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194959, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.056506] env[62627]: DEBUG oslo_vmware.api [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2194958, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.892036} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.056881] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1112.057816] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7218027-0537-4d84-ae82-69f3b4c5a443 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.087524] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Reconfiguring VM instance instance-00000054 to attach disk [datastore2] 9e0693d9-077a-4a02-b1c5-40fdddcdd27d/9e0693d9-077a-4a02-b1c5-40fdddcdd27d.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1112.090479] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8a979f84-f4fb-4513-9926-d30f9dbeb712 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.111145] env[62627]: DEBUG oslo_vmware.api [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1112.111145] env[62627]: value = "task-2194960" [ 1112.111145] env[62627]: _type = "Task" [ 1112.111145] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.122098] env[62627]: DEBUG oslo_vmware.api [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2194960, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.227309] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb9dc60a-676d-4335-b943-aa1309bc9672 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.237345] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dfac230-d445-4a4b-96bb-61a8821bae26 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.280402] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-622e2ebf-94b1-4449-a3ca-96662f812967 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.287909] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de17bbf4-7a29-4d3d-83e4-24b720490eac {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.302064] env[62627]: DEBUG nova.compute.provider_tree [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1112.311938] env[62627]: DEBUG oslo_vmware.api [None req-0037d497-d452-43e1-9543-c05ac87f769e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194959, 'name': PowerOffVM_Task, 'duration_secs': 0.39527} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.312104] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-0037d497-d452-43e1-9543-c05ac87f769e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1112.318042] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-0037d497-d452-43e1-9543-c05ac87f769e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Reconfiguring VM instance instance-00000033 to detach disk 2002 {{(pid=62627) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1112.318295] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3896a279-716b-4c87-bfb1-98a97594c82b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.337341] env[62627]: DEBUG oslo_vmware.api [None req-0037d497-d452-43e1-9543-c05ac87f769e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for the task: (returnval){ [ 1112.337341] env[62627]: value = "task-2194961" [ 1112.337341] env[62627]: _type = "Task" [ 1112.337341] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.346285] env[62627]: DEBUG oslo_vmware.api [None req-0037d497-d452-43e1-9543-c05ac87f769e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194961, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.623613] env[62627]: DEBUG oslo_vmware.api [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2194960, 'name': ReconfigVM_Task, 'duration_secs': 0.302453} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.623996] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Reconfigured VM instance instance-00000054 to attach disk [datastore2] 9e0693d9-077a-4a02-b1c5-40fdddcdd27d/9e0693d9-077a-4a02-b1c5-40fdddcdd27d.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1112.625082] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fc1b1930-48df-4f1a-b0af-4392ec661b8e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.631332] env[62627]: DEBUG oslo_vmware.api [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1112.631332] env[62627]: value = "task-2194962" [ 1112.631332] env[62627]: _type = "Task" [ 1112.631332] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.645933] env[62627]: DEBUG oslo_vmware.api [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2194962, 'name': Rename_Task} progress is 5%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.810848] env[62627]: DEBUG nova.scheduler.client.report [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1112.847783] env[62627]: DEBUG oslo_vmware.api [None req-0037d497-d452-43e1-9543-c05ac87f769e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194961, 'name': ReconfigVM_Task, 'duration_secs': 0.505536} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.848043] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-0037d497-d452-43e1-9543-c05ac87f769e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Reconfigured VM instance instance-00000033 to detach disk 2002 {{(pid=62627) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1112.848244] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-0037d497-d452-43e1-9543-c05ac87f769e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1112.848485] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9cfeec47-191d-422d-a98e-458c73c57e2a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.854192] env[62627]: DEBUG oslo_vmware.api [None req-0037d497-d452-43e1-9543-c05ac87f769e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for the task: (returnval){ [ 1112.854192] env[62627]: value = "task-2194963" [ 1112.854192] env[62627]: _type = "Task" [ 1112.854192] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.861866] env[62627]: DEBUG oslo_vmware.api [None req-0037d497-d452-43e1-9543-c05ac87f769e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194963, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.130479] env[62627]: DEBUG oslo_concurrency.lockutils [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Acquiring lock "b7fd9a2a-1960-4dba-93af-2dd5fe30a90d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1113.130707] env[62627]: DEBUG oslo_concurrency.lockutils [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Lock "b7fd9a2a-1960-4dba-93af-2dd5fe30a90d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1113.142548] env[62627]: DEBUG oslo_vmware.api [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2194962, 'name': Rename_Task, 'duration_secs': 0.155024} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.142916] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1113.143548] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6ec01580-f842-4772-b4b4-3baf07faeb2a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.153303] env[62627]: DEBUG oslo_vmware.api [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1113.153303] env[62627]: value = "task-2194964" [ 1113.153303] env[62627]: _type = "Task" [ 1113.153303] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.162032] env[62627]: DEBUG oslo_vmware.api [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2194964, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.243797] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Acquiring lock "b282093d-8013-436d-94d2-bc26161456ea" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1113.244048] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Lock "b282093d-8013-436d-94d2-bc26161456ea" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1113.316568] env[62627]: DEBUG oslo_concurrency.lockutils [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.392s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1113.317150] env[62627]: DEBUG nova.compute.manager [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] [instance: c0e4ea33-2397-4716-8f6a-603fe099fdc5] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1113.319781] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.076s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1113.321200] env[62627]: INFO nova.compute.claims [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1113.367726] env[62627]: DEBUG oslo_vmware.api [None req-0037d497-d452-43e1-9543-c05ac87f769e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194963, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.636795] env[62627]: DEBUG nova.compute.manager [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] [instance: b7fd9a2a-1960-4dba-93af-2dd5fe30a90d] Starting instance... 
{{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1113.667173] env[62627]: DEBUG oslo_vmware.api [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2194964, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.746973] env[62627]: DEBUG nova.compute.manager [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1113.825562] env[62627]: DEBUG nova.compute.utils [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1113.830436] env[62627]: DEBUG nova.compute.manager [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] [instance: c0e4ea33-2397-4716-8f6a-603fe099fdc5] Allocating IP information in the background. {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1113.830649] env[62627]: DEBUG nova.network.neutron [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] [instance: c0e4ea33-2397-4716-8f6a-603fe099fdc5] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1113.869080] env[62627]: DEBUG oslo_vmware.api [None req-0037d497-d452-43e1-9543-c05ac87f769e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2194963, 'name': PowerOnVM_Task, 'duration_secs': 0.684455} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.869717] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-0037d497-d452-43e1-9543-c05ac87f769e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1113.870923] env[62627]: DEBUG nova.compute.manager [None req-0037d497-d452-43e1-9543-c05ac87f769e tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1113.876024] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ce016b7-01c1-4d2c-a5e9-2df73bc9e7e6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.064317] env[62627]: DEBUG nova.policy [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e05d5b71e1184f8795d34e4c72251bd2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '98810a7e17cc4103871e1779a2bd4c77', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 1114.167807] env[62627]: DEBUG oslo_vmware.api [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2194964, 'name': PowerOnVM_Task, 'duration_secs': 0.731549} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.169326] env[62627]: DEBUG oslo_concurrency.lockutils [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1114.169833] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1114.170032] env[62627]: INFO nova.compute.manager [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Took 8.62 seconds to spawn the instance on the hypervisor. 
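The CopyVirtualDisk_Task, ExtendVirtualDisk_Task, PowerOffVM_Task and PowerOnVM_Task entries above all follow the same oslo.vmware pattern: the driver invokes a vCenter method that returns a Task moref, then blocks in wait_for_task (api.py:397), which repeatedly polls the task (the recurring "_poll_task ... progress is N%" entries from api.py:434) until it completes or fails. Below is a minimal sketch of that invoke-then-wait pattern using the public oslo.vmware session API; the host, credentials and VM managed object reference are placeholders, not values taken from this log.

```python
# Sketch only: the invoke-then-wait pattern behind the wait_for_task /
# _poll_task entries above. Host, credentials and the VM managed object
# reference are placeholders.
from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

session = vmware_api.VMwareAPISession(
    host='vcenter.example.test',      # placeholder vCenter
    server_username='user',           # placeholder credentials
    server_password='secret',
    api_retry_count=3,
    task_poll_interval=0.5)           # interval between "progress is N%" polls

# Placeholder managed object reference for an existing VM.
vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

# Asynchronous vCenter call: returns a Task moref immediately.
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

# Poll the task until it finishes; raises an oslo.vmware exception on error
# and returns the task info on success (the "completed successfully" entries).
task_info = session.wait_for_task(task)
print(task_info.state)
```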
[ 1114.170275] env[62627]: DEBUG nova.compute.manager [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1114.171597] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd05eaba-85a7-4b7c-8dd3-b2110eab660f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.278024] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1114.330879] env[62627]: DEBUG nova.compute.manager [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] [instance: c0e4ea33-2397-4716-8f6a-603fe099fdc5] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1114.546955] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17837e95-13b9-4cef-b05a-2648fc583847 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.561251] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a6d5c81-8546-4cb0-9cea-216b8053239e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.616719] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d9f3b1c-69a7-48a4-8add-39be2ebb2e69 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.625938] env[62627]: DEBUG nova.network.neutron [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] [instance: c0e4ea33-2397-4716-8f6a-603fe099fdc5] Successfully created port: e953f345-dc88-46fa-bee5-036a4f2d9b4c {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1114.634280] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4bc38cc-b522-4ae5-a45b-66db37fdd472 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.651778] env[62627]: DEBUG nova.compute.provider_tree [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Updating inventory in ProviderTree for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 
'allocation_ratio': 1.0}} {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1114.705096] env[62627]: INFO nova.compute.manager [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Took 16.15 seconds to build instance. [ 1115.177613] env[62627]: ERROR nova.scheduler.client.report [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [req-c5467e33-95c0-40cd-8a68-c3631a62109f] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c17e7f32-68d8-4e2f-8979-15d701de02f7. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-c5467e33-95c0-40cd-8a68-c3631a62109f"}]} [ 1115.193643] env[62627]: DEBUG nova.scheduler.client.report [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Refreshing inventories for resource provider c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1115.207146] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f96753b7-9af5-4794-b152-37feb479e8d0 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "9e0693d9-077a-4a02-b1c5-40fdddcdd27d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.664s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1115.211031] env[62627]: DEBUG nova.scheduler.client.report [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Updating ProviderTree inventory for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1115.211031] env[62627]: DEBUG nova.compute.provider_tree [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Updating inventory in ProviderTree for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1115.223177] env[62627]: DEBUG nova.scheduler.client.report [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Refreshing aggregate associations for resource provider c17e7f32-68d8-4e2f-8979-15d701de02f7, aggregates: None {{(pid=62627) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1115.244786] env[62627]: DEBUG nova.scheduler.client.report [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Refreshing trait associations for resource provider c17e7f32-68d8-4e2f-8979-15d701de02f7, traits: COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62627) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1115.349056] env[62627]: DEBUG nova.compute.manager [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] [instance: c0e4ea33-2397-4716-8f6a-603fe099fdc5] Start spawning the instance on the hypervisor. {{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1115.390786] env[62627]: DEBUG nova.virt.hardware [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1115.391041] env[62627]: DEBUG nova.virt.hardware [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1115.391360] env[62627]: DEBUG nova.virt.hardware [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1115.391480] env[62627]: DEBUG nova.virt.hardware [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Flavor pref 0:0:0 {{(pid=62627) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1115.391554] env[62627]: DEBUG nova.virt.hardware [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1115.391656] env[62627]: DEBUG nova.virt.hardware [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1115.391856] env[62627]: DEBUG nova.virt.hardware [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1115.392033] env[62627]: DEBUG nova.virt.hardware [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1115.392234] env[62627]: DEBUG nova.virt.hardware [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1115.392396] env[62627]: DEBUG nova.virt.hardware [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1115.392563] env[62627]: DEBUG nova.virt.hardware [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1115.393447] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af2d1ecd-8411-4b06-915a-6ec147181bab {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.408476] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36b1a85a-ff5f-455a-be36-203505fe1ee1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.520346] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97057a93-0bf0-4e6c-829c-5c46677d65b5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.525872] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-91e0b7f6-dc3a-4e5d-9955-09991e75e3ad {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.558264] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5adf8597-9090-4d6b-bb1b-fa9aa82e6a4b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.568215] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-615f3477-e3b9-41bf-87e3-3e713922bce7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.587020] env[62627]: DEBUG nova.compute.provider_tree [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Updating inventory in ProviderTree for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1116.126247] env[62627]: DEBUG nova.scheduler.client.report [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Updated inventory for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with generation 107 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1116.126409] env[62627]: DEBUG nova.compute.provider_tree [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Updating resource provider c17e7f32-68d8-4e2f-8979-15d701de02f7 generation from 107 to 108 during operation: update_inventory {{(pid=62627) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1116.126610] env[62627]: DEBUG nova.compute.provider_tree [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Updating inventory in ProviderTree for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1116.157611] env[62627]: DEBUG nova.compute.manager [req-5fd34c13-997a-4146-b0bc-01a459fed63b req-81c5b187-1074-4e57-bf87-a1733507011b 
service nova] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Received event network-changed-62b171c6-c991-4816-86e3-ba8a0862f911 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1116.157831] env[62627]: DEBUG nova.compute.manager [req-5fd34c13-997a-4146-b0bc-01a459fed63b req-81c5b187-1074-4e57-bf87-a1733507011b service nova] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Refreshing instance network info cache due to event network-changed-62b171c6-c991-4816-86e3-ba8a0862f911. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1116.158559] env[62627]: DEBUG oslo_concurrency.lockutils [req-5fd34c13-997a-4146-b0bc-01a459fed63b req-81c5b187-1074-4e57-bf87-a1733507011b service nova] Acquiring lock "refresh_cache-457e5aa6-d229-4f48-8817-e918ec4bd0f9" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1116.158559] env[62627]: DEBUG oslo_concurrency.lockutils [req-5fd34c13-997a-4146-b0bc-01a459fed63b req-81c5b187-1074-4e57-bf87-a1733507011b service nova] Acquired lock "refresh_cache-457e5aa6-d229-4f48-8817-e918ec4bd0f9" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1116.159213] env[62627]: DEBUG nova.network.neutron [req-5fd34c13-997a-4146-b0bc-01a459fed63b req-81c5b187-1074-4e57-bf87-a1733507011b service nova] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Refreshing network info cache for port 62b171c6-c991-4816-86e3-ba8a0862f911 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1116.179167] env[62627]: DEBUG nova.compute.manager [req-cfe4c445-e76f-4378-a4a1-3eaa5a5ef2b7 req-515591c0-6e5b-4721-8666-f5b9620e57e4 service nova] [instance: c0e4ea33-2397-4716-8f6a-603fe099fdc5] Received event network-vif-plugged-e953f345-dc88-46fa-bee5-036a4f2d9b4c {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1116.179167] env[62627]: DEBUG oslo_concurrency.lockutils [req-cfe4c445-e76f-4378-a4a1-3eaa5a5ef2b7 req-515591c0-6e5b-4721-8666-f5b9620e57e4 service nova] Acquiring lock "c0e4ea33-2397-4716-8f6a-603fe099fdc5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1116.179167] env[62627]: DEBUG oslo_concurrency.lockutils [req-cfe4c445-e76f-4378-a4a1-3eaa5a5ef2b7 req-515591c0-6e5b-4721-8666-f5b9620e57e4 service nova] Lock "c0e4ea33-2397-4716-8f6a-603fe099fdc5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1116.179167] env[62627]: DEBUG oslo_concurrency.lockutils [req-cfe4c445-e76f-4378-a4a1-3eaa5a5ef2b7 req-515591c0-6e5b-4721-8666-f5b9620e57e4 service nova] Lock "c0e4ea33-2397-4716-8f6a-603fe099fdc5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1116.179167] env[62627]: DEBUG nova.compute.manager [req-cfe4c445-e76f-4378-a4a1-3eaa5a5ef2b7 req-515591c0-6e5b-4721-8666-f5b9620e57e4 service nova] [instance: c0e4ea33-2397-4716-8f6a-603fe099fdc5] No waiting events found dispatching network-vif-plugged-e953f345-dc88-46fa-bee5-036a4f2d9b4c {{(pid=62627) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 1116.179167] env[62627]: WARNING nova.compute.manager [req-cfe4c445-e76f-4378-a4a1-3eaa5a5ef2b7 req-515591c0-6e5b-4721-8666-f5b9620e57e4 service nova] [instance: c0e4ea33-2397-4716-8f6a-603fe099fdc5] Received unexpected event network-vif-plugged-e953f345-dc88-46fa-bee5-036a4f2d9b4c for instance with vm_state building and task_state spawning. [ 1116.273850] env[62627]: DEBUG nova.network.neutron [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] [instance: c0e4ea33-2397-4716-8f6a-603fe099fdc5] Successfully updated port: e953f345-dc88-46fa-bee5-036a4f2d9b4c {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1116.639058] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.319s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1116.639487] env[62627]: DEBUG nova.compute.manager [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1116.642279] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c389e02a-f21d-4da9-ad55-b2a342771dcd tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.493s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1116.642496] env[62627]: DEBUG nova.objects.instance [None req-c389e02a-f21d-4da9-ad55-b2a342771dcd tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lazy-loading 'resources' on Instance uuid 0ee04bf9-6aa6-485c-a979-191d52af236d {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1116.656093] env[62627]: INFO nova.compute.manager [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Rebuilding instance [ 1116.695582] env[62627]: DEBUG nova.compute.manager [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1116.696995] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe5771d8-04c9-4034-9c2c-a29ae4089704 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.777189] env[62627]: DEBUG oslo_concurrency.lockutils [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Acquiring lock 
"refresh_cache-c0e4ea33-2397-4716-8f6a-603fe099fdc5" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1116.777493] env[62627]: DEBUG oslo_concurrency.lockutils [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Acquired lock "refresh_cache-c0e4ea33-2397-4716-8f6a-603fe099fdc5" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1116.777901] env[62627]: DEBUG nova.network.neutron [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] [instance: c0e4ea33-2397-4716-8f6a-603fe099fdc5] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1116.897198] env[62627]: DEBUG nova.network.neutron [req-5fd34c13-997a-4146-b0bc-01a459fed63b req-81c5b187-1074-4e57-bf87-a1733507011b service nova] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Updated VIF entry in instance network info cache for port 62b171c6-c991-4816-86e3-ba8a0862f911. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1116.897581] env[62627]: DEBUG nova.network.neutron [req-5fd34c13-997a-4146-b0bc-01a459fed63b req-81c5b187-1074-4e57-bf87-a1733507011b service nova] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Updating instance_info_cache with network_info: [{"id": "62b171c6-c991-4816-86e3-ba8a0862f911", "address": "fa:16:3e:d7:1a:59", "network": {"id": "0b8ce5a6-9abc-4473-b558-a4e9e5c7d741", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1874234589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.171", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d41b3fa69e2473fbea79b0ec52108fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc9714ff-7109-4ea1-9435-b2b3fbdb9e81", "external-id": "nsx-vlan-transportzone-887", "segmentation_id": 887, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62b171c6-c9", "ovs_interfaceid": "62b171c6-c991-4816-86e3-ba8a0862f911", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1117.054832] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Acquiring lock "3001147e-7be3-4f72-b330-8f99bf7cf3ea" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1117.055080] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 
tempest-AttachVolumeNegativeTest-1428764152-project-member] Lock "3001147e-7be3-4f72-b330-8f99bf7cf3ea" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1117.148442] env[62627]: DEBUG nova.compute.utils [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1117.149910] env[62627]: DEBUG nova.compute.manager [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Allocating IP information in the background. {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1117.150110] env[62627]: DEBUG nova.network.neutron [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1117.195031] env[62627]: DEBUG nova.policy [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ade098da576147a6bff6685cb886ff78', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7a97f57bdf4a46db80460b1d5c9f9b9f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 1117.311935] env[62627]: DEBUG nova.network.neutron [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] [instance: c0e4ea33-2397-4716-8f6a-603fe099fdc5] Instance cache missing network info. 
{{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1117.379057] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a1e40e2-28f5-447d-bd65-11d8f4970343 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.387021] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e49f1810-aa15-43e6-9590-270b5eeabc63 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.418794] env[62627]: DEBUG oslo_concurrency.lockutils [req-5fd34c13-997a-4146-b0bc-01a459fed63b req-81c5b187-1074-4e57-bf87-a1733507011b service nova] Releasing lock "refresh_cache-457e5aa6-d229-4f48-8817-e918ec4bd0f9" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1117.419887] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30d94109-5fa4-44e7-9833-cf80567f5170 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.427229] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63f188b2-2375-4d7f-ac64-04029aa6114f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.442733] env[62627]: DEBUG nova.compute.provider_tree [None req-c389e02a-f21d-4da9-ad55-b2a342771dcd tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1117.470410] env[62627]: DEBUG nova.network.neutron [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] [instance: c0e4ea33-2397-4716-8f6a-603fe099fdc5] Updating instance_info_cache with network_info: [{"id": "e953f345-dc88-46fa-bee5-036a4f2d9b4c", "address": "fa:16:3e:42:6d:60", "network": {"id": "c31c74e2-2494-4962-adac-077dd1f83f30", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1481148106-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98810a7e17cc4103871e1779a2bd4c77", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e49a26b5-7b6b-41fd-8bed-4cd9a6c1a002", "external-id": "nsx-vlan-transportzone-506", "segmentation_id": 506, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape953f345-dc", "ovs_interfaceid": "e953f345-dc88-46fa-bee5-036a4f2d9b4c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1117.556114] env[62627]: DEBUG nova.network.neutron [None 
req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Successfully created port: 291e9d98-ac72-43f7-b745-9f0d290580fa {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1117.559336] env[62627]: DEBUG nova.compute.manager [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1117.653856] env[62627]: DEBUG nova.compute.manager [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1117.710029] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1117.710181] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1e3b797c-f032-4faa-ba9e-9d5f91aef82e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.720263] env[62627]: DEBUG oslo_vmware.api [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1117.720263] env[62627]: value = "task-2194965" [ 1117.720263] env[62627]: _type = "Task" [ 1117.720263] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.734340] env[62627]: DEBUG oslo_vmware.api [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2194965, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.946647] env[62627]: DEBUG nova.scheduler.client.report [None req-c389e02a-f21d-4da9-ad55-b2a342771dcd tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1117.975805] env[62627]: DEBUG oslo_concurrency.lockutils [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Releasing lock "refresh_cache-c0e4ea33-2397-4716-8f6a-603fe099fdc5" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1117.976338] env[62627]: DEBUG nova.compute.manager [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] [instance: c0e4ea33-2397-4716-8f6a-603fe099fdc5] Instance network_info: |[{"id": "e953f345-dc88-46fa-bee5-036a4f2d9b4c", "address": "fa:16:3e:42:6d:60", "network": {"id": "c31c74e2-2494-4962-adac-077dd1f83f30", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1481148106-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98810a7e17cc4103871e1779a2bd4c77", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e49a26b5-7b6b-41fd-8bed-4cd9a6c1a002", "external-id": "nsx-vlan-transportzone-506", "segmentation_id": 506, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape953f345-dc", "ovs_interfaceid": "e953f345-dc88-46fa-bee5-036a4f2d9b4c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1117.976940] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] [instance: c0e4ea33-2397-4716-8f6a-603fe099fdc5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:42:6d:60', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e49a26b5-7b6b-41fd-8bed-4cd9a6c1a002', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e953f345-dc88-46fa-bee5-036a4f2d9b4c', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1117.986779] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Creating folder: Project (98810a7e17cc4103871e1779a2bd4c77). Parent ref: group-v447541. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1117.987103] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0d1a1d90-7625-4596-b242-d677a9cc883a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.998763] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Created folder: Project (98810a7e17cc4103871e1779a2bd4c77) in parent group-v447541. [ 1117.998991] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Creating folder: Instances. Parent ref: group-v447768. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1117.999281] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a86e54a0-00d5-4480-b531-d69d7abe895a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.008320] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Created folder: Instances in parent group-v447768. [ 1118.008637] env[62627]: DEBUG oslo.service.loopingcall [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1118.008915] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c0e4ea33-2397-4716-8f6a-603fe099fdc5] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1118.009244] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-545bd106-758e-4c57-9b4b-af8da9e719ed {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.032502] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1118.032502] env[62627]: value = "task-2194968" [ 1118.032502] env[62627]: _type = "Task" [ 1118.032502] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.039895] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194968, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.081805] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1118.183663] env[62627]: DEBUG nova.compute.manager [req-d6d371d4-7768-494f-b110-d868ec195214 req-27fc1171-6cc1-4e6b-8f0e-1225bdc1a2fe service nova] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Received event network-changed-62b171c6-c991-4816-86e3-ba8a0862f911 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1118.183862] env[62627]: DEBUG nova.compute.manager [req-d6d371d4-7768-494f-b110-d868ec195214 req-27fc1171-6cc1-4e6b-8f0e-1225bdc1a2fe service nova] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Refreshing instance network info cache due to event network-changed-62b171c6-c991-4816-86e3-ba8a0862f911. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1118.184096] env[62627]: DEBUG oslo_concurrency.lockutils [req-d6d371d4-7768-494f-b110-d868ec195214 req-27fc1171-6cc1-4e6b-8f0e-1225bdc1a2fe service nova] Acquiring lock "refresh_cache-457e5aa6-d229-4f48-8817-e918ec4bd0f9" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1118.184240] env[62627]: DEBUG oslo_concurrency.lockutils [req-d6d371d4-7768-494f-b110-d868ec195214 req-27fc1171-6cc1-4e6b-8f0e-1225bdc1a2fe service nova] Acquired lock "refresh_cache-457e5aa6-d229-4f48-8817-e918ec4bd0f9" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1118.184403] env[62627]: DEBUG nova.network.neutron [req-d6d371d4-7768-494f-b110-d868ec195214 req-27fc1171-6cc1-4e6b-8f0e-1225bdc1a2fe service nova] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Refreshing network info cache for port 62b171c6-c991-4816-86e3-ba8a0862f911 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1118.214517] env[62627]: DEBUG nova.compute.manager [req-40e75b6d-2001-4265-9345-c2da3110985d req-b9e53454-4f97-48c2-ab24-f56d986bd38a service nova] [instance: c0e4ea33-2397-4716-8f6a-603fe099fdc5] Received event network-changed-e953f345-dc88-46fa-bee5-036a4f2d9b4c {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1118.214774] env[62627]: DEBUG nova.compute.manager [req-40e75b6d-2001-4265-9345-c2da3110985d req-b9e53454-4f97-48c2-ab24-f56d986bd38a service nova] [instance: c0e4ea33-2397-4716-8f6a-603fe099fdc5] Refreshing instance network info cache due to event network-changed-e953f345-dc88-46fa-bee5-036a4f2d9b4c. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1118.214910] env[62627]: DEBUG oslo_concurrency.lockutils [req-40e75b6d-2001-4265-9345-c2da3110985d req-b9e53454-4f97-48c2-ab24-f56d986bd38a service nova] Acquiring lock "refresh_cache-c0e4ea33-2397-4716-8f6a-603fe099fdc5" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1118.215063] env[62627]: DEBUG oslo_concurrency.lockutils [req-40e75b6d-2001-4265-9345-c2da3110985d req-b9e53454-4f97-48c2-ab24-f56d986bd38a service nova] Acquired lock "refresh_cache-c0e4ea33-2397-4716-8f6a-603fe099fdc5" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1118.215233] env[62627]: DEBUG nova.network.neutron [req-40e75b6d-2001-4265-9345-c2da3110985d req-b9e53454-4f97-48c2-ab24-f56d986bd38a service nova] [instance: c0e4ea33-2397-4716-8f6a-603fe099fdc5] Refreshing network info cache for port e953f345-dc88-46fa-bee5-036a4f2d9b4c {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1118.230717] env[62627]: DEBUG oslo_vmware.api [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2194965, 'name': PowerOffVM_Task, 'duration_secs': 0.262888} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.230975] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1118.231225] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1118.231990] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2add246a-764e-472d-b4f2-7e9dbe98599d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.239174] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1118.239385] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-37bcb44d-14ce-4494-8ae7-2f60b4ef3e7f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.293614] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1118.293773] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None 
req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1118.294010] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Deleting the datastore file [datastore2] 9e0693d9-077a-4a02-b1c5-40fdddcdd27d {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1118.294273] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0ebb79b9-965c-4c9f-9b5a-83274775d93f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.300456] env[62627]: DEBUG oslo_vmware.api [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1118.300456] env[62627]: value = "task-2194970" [ 1118.300456] env[62627]: _type = "Task" [ 1118.300456] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.308009] env[62627]: DEBUG oslo_vmware.api [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2194970, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.451733] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c389e02a-f21d-4da9-ad55-b2a342771dcd tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.809s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1118.454401] env[62627]: DEBUG oslo_concurrency.lockutils [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.285s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1118.455853] env[62627]: INFO nova.compute.claims [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] [instance: b7fd9a2a-1960-4dba-93af-2dd5fe30a90d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1118.473352] env[62627]: INFO nova.scheduler.client.report [None req-c389e02a-f21d-4da9-ad55-b2a342771dcd tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Deleted allocations for instance 0ee04bf9-6aa6-485c-a979-191d52af236d [ 1118.543424] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194968, 'name': CreateVM_Task, 'duration_secs': 0.42632} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.543645] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c0e4ea33-2397-4716-8f6a-603fe099fdc5] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1118.544969] env[62627]: DEBUG oslo_concurrency.lockutils [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1118.545210] env[62627]: DEBUG oslo_concurrency.lockutils [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1118.545761] env[62627]: DEBUG oslo_concurrency.lockutils [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1118.546227] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-56d2487e-bdf8-48ea-868e-0b88e3e4e1ac {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.551158] env[62627]: DEBUG oslo_vmware.api [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Waiting for the task: (returnval){ [ 1118.551158] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52af492c-4b6a-4af7-e4de-212f8ecbd932" [ 1118.551158] env[62627]: _type = "Task" [ 1118.551158] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.561777] env[62627]: DEBUG oslo_vmware.api [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52af492c-4b6a-4af7-e4de-212f8ecbd932, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.665894] env[62627]: DEBUG nova.compute.manager [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Start spawning the instance on the hypervisor. 
{{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1118.692365] env[62627]: DEBUG nova.virt.hardware [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1118.693077] env[62627]: DEBUG nova.virt.hardware [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1118.693258] env[62627]: DEBUG nova.virt.hardware [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1118.693824] env[62627]: DEBUG nova.virt.hardware [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1118.693824] env[62627]: DEBUG nova.virt.hardware [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1118.693824] env[62627]: DEBUG nova.virt.hardware [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1118.694043] env[62627]: DEBUG nova.virt.hardware [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1118.694132] env[62627]: DEBUG nova.virt.hardware [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1118.694565] env[62627]: DEBUG 
nova.virt.hardware [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1118.694565] env[62627]: DEBUG nova.virt.hardware [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1118.694743] env[62627]: DEBUG nova.virt.hardware [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1118.695650] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36478158-9c6b-4402-b540-be3454a93570 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.704950] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ade54d7-7751-412a-adb7-64eef58f2905 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.812337] env[62627]: DEBUG oslo_vmware.api [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2194970, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.203976} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.812570] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1118.812756] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1118.812926] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1118.980610] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c389e02a-f21d-4da9-ad55-b2a342771dcd tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "0ee04bf9-6aa6-485c-a979-191d52af236d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.719s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1119.047949] env[62627]: DEBUG nova.network.neutron [req-40e75b6d-2001-4265-9345-c2da3110985d req-b9e53454-4f97-48c2-ab24-f56d986bd38a service nova] [instance: c0e4ea33-2397-4716-8f6a-603fe099fdc5] Updated VIF entry in instance network info cache for port e953f345-dc88-46fa-bee5-036a4f2d9b4c. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1119.048329] env[62627]: DEBUG nova.network.neutron [req-40e75b6d-2001-4265-9345-c2da3110985d req-b9e53454-4f97-48c2-ab24-f56d986bd38a service nova] [instance: c0e4ea33-2397-4716-8f6a-603fe099fdc5] Updating instance_info_cache with network_info: [{"id": "e953f345-dc88-46fa-bee5-036a4f2d9b4c", "address": "fa:16:3e:42:6d:60", "network": {"id": "c31c74e2-2494-4962-adac-077dd1f83f30", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1481148106-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98810a7e17cc4103871e1779a2bd4c77", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e49a26b5-7b6b-41fd-8bed-4cd9a6c1a002", "external-id": "nsx-vlan-transportzone-506", "segmentation_id": 506, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape953f345-dc", "ovs_interfaceid": "e953f345-dc88-46fa-bee5-036a4f2d9b4c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1119.060959] env[62627]: DEBUG oslo_vmware.api [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52af492c-4b6a-4af7-e4de-212f8ecbd932, 'name': SearchDatastore_Task, 'duration_secs': 0.010429} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.062782] env[62627]: DEBUG oslo_concurrency.lockutils [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1119.063078] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] [instance: c0e4ea33-2397-4716-8f6a-603fe099fdc5] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1119.063323] env[62627]: DEBUG oslo_concurrency.lockutils [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1119.063470] env[62627]: DEBUG oslo_concurrency.lockutils [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1119.063643] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1119.064169] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-19a65a63-f5ab-4577-8563-976e72d92e7c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.067163] env[62627]: DEBUG nova.network.neutron [req-d6d371d4-7768-494f-b110-d868ec195214 req-27fc1171-6cc1-4e6b-8f0e-1225bdc1a2fe service nova] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Updated VIF entry in instance network info cache for port 62b171c6-c991-4816-86e3-ba8a0862f911. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1119.067658] env[62627]: DEBUG nova.network.neutron [req-d6d371d4-7768-494f-b110-d868ec195214 req-27fc1171-6cc1-4e6b-8f0e-1225bdc1a2fe service nova] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Updating instance_info_cache with network_info: [{"id": "62b171c6-c991-4816-86e3-ba8a0862f911", "address": "fa:16:3e:d7:1a:59", "network": {"id": "0b8ce5a6-9abc-4473-b558-a4e9e5c7d741", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1874234589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.171", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d41b3fa69e2473fbea79b0ec52108fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc9714ff-7109-4ea1-9435-b2b3fbdb9e81", "external-id": "nsx-vlan-transportzone-887", "segmentation_id": 887, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62b171c6-c9", "ovs_interfaceid": "62b171c6-c991-4816-86e3-ba8a0862f911", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1119.075096] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1119.075366] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1119.076015] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe90c9aa-d889-488d-ac4a-c9f11598d9b7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.085513] env[62627]: DEBUG oslo_vmware.api [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Waiting for the task: (returnval){ [ 1119.085513] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52459f38-59cc-383b-669c-f7681852d8ba" [ 1119.085513] env[62627]: _type = "Task" [ 1119.085513] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.093562] env[62627]: DEBUG oslo_vmware.api [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52459f38-59cc-383b-669c-f7681852d8ba, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.118754] env[62627]: DEBUG nova.network.neutron [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Successfully updated port: 291e9d98-ac72-43f7-b745-9f0d290580fa {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1119.552855] env[62627]: DEBUG oslo_concurrency.lockutils [req-40e75b6d-2001-4265-9345-c2da3110985d req-b9e53454-4f97-48c2-ab24-f56d986bd38a service nova] Releasing lock "refresh_cache-c0e4ea33-2397-4716-8f6a-603fe099fdc5" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1119.570406] env[62627]: DEBUG oslo_concurrency.lockutils [req-d6d371d4-7768-494f-b110-d868ec195214 req-27fc1171-6cc1-4e6b-8f0e-1225bdc1a2fe service nova] Releasing lock "refresh_cache-457e5aa6-d229-4f48-8817-e918ec4bd0f9" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1119.596120] env[62627]: DEBUG oslo_vmware.api [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52459f38-59cc-383b-669c-f7681852d8ba, 'name': SearchDatastore_Task, 'duration_secs': 0.009047} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.596939] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a01f9ef3-6ef6-4606-8f82-1f81f4779975 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.603281] env[62627]: DEBUG oslo_vmware.api [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Waiting for the task: (returnval){ [ 1119.603281] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52082215-aed2-42a0-5670-caf8a9dd1afc" [ 1119.603281] env[62627]: _type = "Task" [ 1119.603281] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.610677] env[62627]: DEBUG oslo_vmware.api [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52082215-aed2-42a0-5670-caf8a9dd1afc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.621165] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquiring lock "refresh_cache-f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1119.621362] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquired lock "refresh_cache-f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1119.621453] env[62627]: DEBUG nova.network.neutron [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1119.635034] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f385388-fd10-4b74-9466-22c880c4fe3a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.643040] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eb577c5-6708-4565-9cab-b04f38d916c5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.674595] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cddd55c-343e-483d-bdea-fd1eceec4de8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.681778] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7fcb27d-60ce-4f52-936b-481ab0be5788 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.695417] env[62627]: DEBUG nova.compute.provider_tree [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1119.843440] env[62627]: DEBUG nova.virt.hardware [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1119.843703] env[62627]: DEBUG nova.virt.hardware [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1119.843863] env[62627]: DEBUG nova.virt.hardware [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1119.844058] env[62627]: DEBUG nova.virt.hardware [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1119.844213] env[62627]: DEBUG nova.virt.hardware [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1119.844363] env[62627]: DEBUG nova.virt.hardware [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1119.844568] env[62627]: DEBUG nova.virt.hardware [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1119.844727] env[62627]: DEBUG nova.virt.hardware [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1119.844889] env[62627]: DEBUG nova.virt.hardware [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1119.845077] env[62627]: DEBUG nova.virt.hardware [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:579}} [ 1119.845279] env[62627]: DEBUG nova.virt.hardware [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1119.846138] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55c524a3-0e9c-4001-83a1-52a1e9d36b57 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.853884] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e24a129-c80b-4720-bebc-295ad76eb168 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.866797] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:12:73:4e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '098df9b7-d759-47f7-b756-334848cb423b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '880f6f52-e5e1-4cef-ac19-65931c8f46a6', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1119.873971] env[62627]: DEBUG oslo.service.loopingcall [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1119.874216] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1119.874413] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3cc8ccb3-3d77-4078-b8f5-374977368885 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.894016] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1119.894016] env[62627]: value = "task-2194971" [ 1119.894016] env[62627]: _type = "Task" [ 1119.894016] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.902245] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194971, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.025411] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquiring lock "abb14e3e-b521-4f21-bc94-6bbb66fbee57" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1120.025567] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "abb14e3e-b521-4f21-bc94-6bbb66fbee57" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1120.113846] env[62627]: DEBUG oslo_vmware.api [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52082215-aed2-42a0-5670-caf8a9dd1afc, 'name': SearchDatastore_Task, 'duration_secs': 0.012299} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1120.114195] env[62627]: DEBUG oslo_concurrency.lockutils [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1120.114507] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] c0e4ea33-2397-4716-8f6a-603fe099fdc5/c0e4ea33-2397-4716-8f6a-603fe099fdc5.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1120.114966] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-188a616f-9441-484f-b0b4-98e896084714 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.122634] env[62627]: DEBUG oslo_vmware.api [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Waiting for the task: (returnval){ [ 1120.122634] env[62627]: value = "task-2194972" [ 1120.122634] env[62627]: _type = "Task" [ 1120.122634] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.132688] env[62627]: DEBUG oslo_vmware.api [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Task: {'id': task-2194972, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.157953] env[62627]: DEBUG nova.network.neutron [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1120.200388] env[62627]: DEBUG nova.scheduler.client.report [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1120.288263] env[62627]: DEBUG nova.compute.manager [req-79407c4d-320a-4fc9-ab31-ffa910dc2afe req-a7fe2a96-424a-4d56-9cea-972f0ea0387d service nova] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Received event network-vif-plugged-291e9d98-ac72-43f7-b745-9f0d290580fa {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1120.288263] env[62627]: DEBUG oslo_concurrency.lockutils [req-79407c4d-320a-4fc9-ab31-ffa910dc2afe req-a7fe2a96-424a-4d56-9cea-972f0ea0387d service nova] Acquiring lock "f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1120.288263] env[62627]: DEBUG oslo_concurrency.lockutils [req-79407c4d-320a-4fc9-ab31-ffa910dc2afe req-a7fe2a96-424a-4d56-9cea-972f0ea0387d service nova] Lock "f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1120.288263] env[62627]: DEBUG oslo_concurrency.lockutils [req-79407c4d-320a-4fc9-ab31-ffa910dc2afe req-a7fe2a96-424a-4d56-9cea-972f0ea0387d service nova] Lock "f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1120.288263] env[62627]: DEBUG nova.compute.manager [req-79407c4d-320a-4fc9-ab31-ffa910dc2afe req-a7fe2a96-424a-4d56-9cea-972f0ea0387d service nova] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] No waiting events found dispatching network-vif-plugged-291e9d98-ac72-43f7-b745-9f0d290580fa {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1120.289155] env[62627]: WARNING nova.compute.manager [req-79407c4d-320a-4fc9-ab31-ffa910dc2afe req-a7fe2a96-424a-4d56-9cea-972f0ea0387d service nova] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Received unexpected event network-vif-plugged-291e9d98-ac72-43f7-b745-9f0d290580fa for instance with vm_state building and task_state spawning. 
[ 1120.289532] env[62627]: DEBUG nova.compute.manager [req-79407c4d-320a-4fc9-ab31-ffa910dc2afe req-a7fe2a96-424a-4d56-9cea-972f0ea0387d service nova] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Received event network-changed-291e9d98-ac72-43f7-b745-9f0d290580fa {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1120.289905] env[62627]: DEBUG nova.compute.manager [req-79407c4d-320a-4fc9-ab31-ffa910dc2afe req-a7fe2a96-424a-4d56-9cea-972f0ea0387d service nova] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Refreshing instance network info cache due to event network-changed-291e9d98-ac72-43f7-b745-9f0d290580fa. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1120.290259] env[62627]: DEBUG oslo_concurrency.lockutils [req-79407c4d-320a-4fc9-ab31-ffa910dc2afe req-a7fe2a96-424a-4d56-9cea-972f0ea0387d service nova] Acquiring lock "refresh_cache-f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1120.345829] env[62627]: DEBUG nova.network.neutron [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Updating instance_info_cache with network_info: [{"id": "291e9d98-ac72-43f7-b745-9f0d290580fa", "address": "fa:16:3e:c2:63:41", "network": {"id": "1e37275e-31eb-4a79-8ff1-1b105d16fe3d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-59116622-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7a97f57bdf4a46db80460b1d5c9f9b9f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f499bc9-78da-46c1-9274-19edf26d31cb", "external-id": "nsx-vlan-transportzone-243", "segmentation_id": 243, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap291e9d98-ac", "ovs_interfaceid": "291e9d98-ac72-43f7-b745-9f0d290580fa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1120.405811] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194971, 'name': CreateVM_Task, 'duration_secs': 0.304652} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1120.406016] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1120.406749] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1120.406938] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1120.407294] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1120.407586] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-71eb729f-ceb5-4a0d-a143-616c174618f4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.414880] env[62627]: DEBUG oslo_vmware.api [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1120.414880] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52fbd192-7caa-a7ae-2d9e-63d72ccf9112" [ 1120.414880] env[62627]: _type = "Task" [ 1120.414880] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.425808] env[62627]: DEBUG oslo_vmware.api [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52fbd192-7caa-a7ae-2d9e-63d72ccf9112, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.528795] env[62627]: DEBUG nova.compute.manager [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1120.633795] env[62627]: DEBUG oslo_vmware.api [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Task: {'id': task-2194972, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.502978} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1120.634202] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] c0e4ea33-2397-4716-8f6a-603fe099fdc5/c0e4ea33-2397-4716-8f6a-603fe099fdc5.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1120.634583] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] [instance: c0e4ea33-2397-4716-8f6a-603fe099fdc5] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1120.634789] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-27cf132d-a3dd-4cbd-970f-35a74eba7d10 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.641587] env[62627]: DEBUG oslo_vmware.api [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Waiting for the task: (returnval){ [ 1120.641587] env[62627]: value = "task-2194973" [ 1120.641587] env[62627]: _type = "Task" [ 1120.641587] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.650205] env[62627]: DEBUG oslo_vmware.api [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Task: {'id': task-2194973, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.705990] env[62627]: DEBUG oslo_concurrency.lockutils [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.252s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1120.706723] env[62627]: DEBUG nova.compute.manager [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] [instance: b7fd9a2a-1960-4dba-93af-2dd5fe30a90d] Start building networks asynchronously for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1120.709253] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.431s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1120.710630] env[62627]: INFO nova.compute.claims [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1120.850121] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Releasing lock "refresh_cache-f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1120.850298] env[62627]: DEBUG nova.compute.manager [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Instance network_info: |[{"id": "291e9d98-ac72-43f7-b745-9f0d290580fa", "address": "fa:16:3e:c2:63:41", "network": {"id": "1e37275e-31eb-4a79-8ff1-1b105d16fe3d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-59116622-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7a97f57bdf4a46db80460b1d5c9f9b9f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f499bc9-78da-46c1-9274-19edf26d31cb", "external-id": "nsx-vlan-transportzone-243", "segmentation_id": 243, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap291e9d98-ac", "ovs_interfaceid": "291e9d98-ac72-43f7-b745-9f0d290580fa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1120.850637] env[62627]: DEBUG oslo_concurrency.lockutils [req-79407c4d-320a-4fc9-ab31-ffa910dc2afe req-a7fe2a96-424a-4d56-9cea-972f0ea0387d service nova] Acquired lock "refresh_cache-f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1120.850822] env[62627]: DEBUG nova.network.neutron [req-79407c4d-320a-4fc9-ab31-ffa910dc2afe req-a7fe2a96-424a-4d56-9cea-972f0ea0387d service nova] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Refreshing network info cache for port 291e9d98-ac72-43f7-b745-9f0d290580fa {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1120.852363] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-f6d62c26-136a-486f-aa78-0863907f5053 
tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c2:63:41', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3f499bc9-78da-46c1-9274-19edf26d31cb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '291e9d98-ac72-43f7-b745-9f0d290580fa', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1120.859901] env[62627]: DEBUG oslo.service.loopingcall [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1120.862882] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1120.863358] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c561654d-ac30-469a-90cb-4dbc5a45e0a4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.883694] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1120.883694] env[62627]: value = "task-2194974" [ 1120.883694] env[62627]: _type = "Task" [ 1120.883694] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.891972] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194974, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.926588] env[62627]: DEBUG oslo_vmware.api [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52fbd192-7caa-a7ae-2d9e-63d72ccf9112, 'name': SearchDatastore_Task, 'duration_secs': 0.060132} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1120.926932] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1120.927210] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1120.927456] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1120.927603] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1120.927784] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1120.928101] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-17129392-bbfe-472a-ab7d-66c39298f416 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.939136] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1120.939364] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1120.940188] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0631a96c-183c-4054-88ce-bd774ce17450 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.946438] env[62627]: DEBUG oslo_vmware.api [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1120.946438] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52faecb7-acb3-4a79-aa85-bf63433bf77f" [ 1120.946438] env[62627]: _type = "Task" [ 1120.946438] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.955294] env[62627]: DEBUG oslo_vmware.api [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52faecb7-acb3-4a79-aa85-bf63433bf77f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.060430] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1121.152941] env[62627]: DEBUG oslo_vmware.api [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Task: {'id': task-2194973, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.054987} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.152941] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] [instance: c0e4ea33-2397-4716-8f6a-603fe099fdc5] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1121.153443] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4092455-f305-4c95-9b0d-c44828450465 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.156806] env[62627]: DEBUG nova.network.neutron [req-79407c4d-320a-4fc9-ab31-ffa910dc2afe req-a7fe2a96-424a-4d56-9cea-972f0ea0387d service nova] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Updated VIF entry in instance network info cache for port 291e9d98-ac72-43f7-b745-9f0d290580fa. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1121.157159] env[62627]: DEBUG nova.network.neutron [req-79407c4d-320a-4fc9-ab31-ffa910dc2afe req-a7fe2a96-424a-4d56-9cea-972f0ea0387d service nova] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Updating instance_info_cache with network_info: [{"id": "291e9d98-ac72-43f7-b745-9f0d290580fa", "address": "fa:16:3e:c2:63:41", "network": {"id": "1e37275e-31eb-4a79-8ff1-1b105d16fe3d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-59116622-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7a97f57bdf4a46db80460b1d5c9f9b9f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f499bc9-78da-46c1-9274-19edf26d31cb", "external-id": "nsx-vlan-transportzone-243", "segmentation_id": 243, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap291e9d98-ac", "ovs_interfaceid": "291e9d98-ac72-43f7-b745-9f0d290580fa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1121.179335] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] [instance: c0e4ea33-2397-4716-8f6a-603fe099fdc5] Reconfiguring VM instance instance-00000055 to attach disk [datastore2] c0e4ea33-2397-4716-8f6a-603fe099fdc5/c0e4ea33-2397-4716-8f6a-603fe099fdc5.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1121.180275] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1e1578a1-93dd-44d0-bb8f-7b8a7d336099 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.200115] env[62627]: DEBUG oslo_vmware.api [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Waiting for the task: (returnval){ [ 1121.200115] env[62627]: value = "task-2194975" [ 1121.200115] env[62627]: _type = "Task" [ 1121.200115] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.211057] env[62627]: DEBUG oslo_vmware.api [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Task: {'id': task-2194975, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.214958] env[62627]: DEBUG nova.compute.utils [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1121.218299] env[62627]: DEBUG nova.compute.manager [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] [instance: b7fd9a2a-1960-4dba-93af-2dd5fe30a90d] Allocating IP information in the background. {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1121.218432] env[62627]: DEBUG nova.network.neutron [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] [instance: b7fd9a2a-1960-4dba-93af-2dd5fe30a90d] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1121.272372] env[62627]: DEBUG nova.policy [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a6859cc99dc042499d543abf04135ef4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e00bc84b05714dfc801b13eb2aae734d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 1121.396482] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194974, 'name': CreateVM_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.457203] env[62627]: DEBUG oslo_vmware.api [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52faecb7-acb3-4a79-aa85-bf63433bf77f, 'name': SearchDatastore_Task, 'duration_secs': 0.012085} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.458142] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-09253ca1-ad8c-4b39-b829-454ba98e4421 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.464778] env[62627]: DEBUG oslo_vmware.api [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1121.464778] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52faa4ae-3e08-1b85-35f2-0027e10e5841" [ 1121.464778] env[62627]: _type = "Task" [ 1121.464778] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.472725] env[62627]: DEBUG oslo_vmware.api [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52faa4ae-3e08-1b85-35f2-0027e10e5841, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.634897] env[62627]: DEBUG nova.network.neutron [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] [instance: b7fd9a2a-1960-4dba-93af-2dd5fe30a90d] Successfully created port: 772cf7cb-3d85-4d24-a40d-d294b5f5c582 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1121.659770] env[62627]: DEBUG oslo_concurrency.lockutils [req-79407c4d-320a-4fc9-ab31-ffa910dc2afe req-a7fe2a96-424a-4d56-9cea-972f0ea0387d service nova] Releasing lock "refresh_cache-f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1121.710772] env[62627]: DEBUG oslo_vmware.api [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Task: {'id': task-2194975, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.712318] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3227421b-d2da-4f82-988b-120f5dddf6b7 tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Acquiring lock "ae4ec050-5062-4e57-8090-f67f8c8013ff" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1121.712590] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3227421b-d2da-4f82-988b-120f5dddf6b7 tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Lock "ae4ec050-5062-4e57-8090-f67f8c8013ff" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1121.712785] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3227421b-d2da-4f82-988b-120f5dddf6b7 tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Acquiring lock "ae4ec050-5062-4e57-8090-f67f8c8013ff-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1121.712964] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3227421b-d2da-4f82-988b-120f5dddf6b7 tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Lock "ae4ec050-5062-4e57-8090-f67f8c8013ff-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1121.713364] env[62627]: DEBUG 
oslo_concurrency.lockutils [None req-3227421b-d2da-4f82-988b-120f5dddf6b7 tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Lock "ae4ec050-5062-4e57-8090-f67f8c8013ff-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1121.715756] env[62627]: INFO nova.compute.manager [None req-3227421b-d2da-4f82-988b-120f5dddf6b7 tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] [instance: ae4ec050-5062-4e57-8090-f67f8c8013ff] Terminating instance [ 1121.718318] env[62627]: DEBUG nova.compute.manager [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] [instance: b7fd9a2a-1960-4dba-93af-2dd5fe30a90d] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1121.896076] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194974, 'name': CreateVM_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.929557] env[62627]: DEBUG nova.compute.manager [None req-12ca1cb5-b141-4427-8ba3-db21cf725159 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1121.930447] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b234abca-4021-4f3c-80af-b88473555837 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.934436] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d05e0831-62d0-473e-8913-0721718ae9e2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.946133] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc8d35d5-e9e0-4e57-84a4-d46873ad2c0a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.981284] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec1e55c5-d308-4efd-9568-2e4a763c5f4b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.989267] env[62627]: DEBUG oslo_vmware.api [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52faa4ae-3e08-1b85-35f2-0027e10e5841, 'name': SearchDatastore_Task, 'duration_secs': 0.010625} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.991277] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1121.991541] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 9e0693d9-077a-4a02-b1c5-40fdddcdd27d/9e0693d9-077a-4a02-b1c5-40fdddcdd27d.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1121.991826] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e186d2cf-2fe1-4c39-91df-0dda13931d94 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.994496] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4324cc5-6b78-476c-8e1b-2dfcfd12dc2f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.008533] env[62627]: DEBUG nova.compute.provider_tree [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1122.010713] env[62627]: DEBUG oslo_vmware.api [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1122.010713] env[62627]: value = "task-2194976" [ 1122.010713] env[62627]: _type = "Task" [ 1122.010713] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.017982] env[62627]: DEBUG oslo_vmware.api [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2194976, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.210897] env[62627]: DEBUG oslo_vmware.api [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Task: {'id': task-2194975, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.221836] env[62627]: DEBUG nova.compute.manager [None req-3227421b-d2da-4f82-988b-120f5dddf6b7 tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] [instance: ae4ec050-5062-4e57-8090-f67f8c8013ff] Start destroying the instance on the hypervisor. 
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1122.222089] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-3227421b-d2da-4f82-988b-120f5dddf6b7 tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] [instance: ae4ec050-5062-4e57-8090-f67f8c8013ff] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1122.223012] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0553a7d-dec3-4d62-a094-ee3678cbb298 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.234747] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-3227421b-d2da-4f82-988b-120f5dddf6b7 tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] [instance: ae4ec050-5062-4e57-8090-f67f8c8013ff] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1122.235057] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d2dd6d72-b650-49fe-bf74-268af1e13ab6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.241590] env[62627]: DEBUG oslo_vmware.api [None req-3227421b-d2da-4f82-988b-120f5dddf6b7 tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Waiting for the task: (returnval){ [ 1122.241590] env[62627]: value = "task-2194977" [ 1122.241590] env[62627]: _type = "Task" [ 1122.241590] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.249882] env[62627]: DEBUG oslo_vmware.api [None req-3227421b-d2da-4f82-988b-120f5dddf6b7 tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Task: {'id': task-2194977, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.400193] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194974, 'name': CreateVM_Task, 'duration_secs': 1.327828} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.400449] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1122.401084] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1122.401293] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1122.401721] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1122.401987] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6e0bd084-ad29-47e1-85eb-5140b376e174 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.407957] env[62627]: DEBUG oslo_vmware.api [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1122.407957] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52b54ed0-a25a-6448-6d87-dc04843da3ba" [ 1122.407957] env[62627]: _type = "Task" [ 1122.407957] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.417672] env[62627]: DEBUG oslo_vmware.api [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52b54ed0-a25a-6448-6d87-dc04843da3ba, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.451551] env[62627]: INFO nova.compute.manager [None req-12ca1cb5-b141-4427-8ba3-db21cf725159 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] instance snapshotting [ 1122.451948] env[62627]: DEBUG nova.objects.instance [None req-12ca1cb5-b141-4427-8ba3-db21cf725159 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lazy-loading 'flavor' on Instance uuid d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1122.512067] env[62627]: DEBUG nova.scheduler.client.report [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1122.524424] env[62627]: DEBUG oslo_vmware.api [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2194976, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.711012] env[62627]: DEBUG oslo_vmware.api [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Task: {'id': task-2194975, 'name': ReconfigVM_Task, 'duration_secs': 1.404371} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.711335] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] [instance: c0e4ea33-2397-4716-8f6a-603fe099fdc5] Reconfigured VM instance instance-00000055 to attach disk [datastore2] c0e4ea33-2397-4716-8f6a-603fe099fdc5/c0e4ea33-2397-4716-8f6a-603fe099fdc5.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1122.711899] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9a3e4b3c-5155-4c61-9dab-02f5dd765c26 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.719501] env[62627]: DEBUG oslo_vmware.api [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Waiting for the task: (returnval){ [ 1122.719501] env[62627]: value = "task-2194978" [ 1122.719501] env[62627]: _type = "Task" [ 1122.719501] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.727116] env[62627]: DEBUG oslo_vmware.api [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Task: {'id': task-2194978, 'name': Rename_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.731197] env[62627]: DEBUG nova.compute.manager [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] [instance: b7fd9a2a-1960-4dba-93af-2dd5fe30a90d] Start spawning the instance on the hypervisor. {{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1122.751624] env[62627]: DEBUG oslo_vmware.api [None req-3227421b-d2da-4f82-988b-120f5dddf6b7 tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Task: {'id': task-2194977, 'name': PowerOffVM_Task, 'duration_secs': 0.20772} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.753561] env[62627]: DEBUG nova.virt.hardware [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1122.753783] env[62627]: DEBUG nova.virt.hardware [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1122.753940] env[62627]: DEBUG nova.virt.hardware [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1122.754139] env[62627]: DEBUG nova.virt.hardware [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1122.754286] env[62627]: DEBUG nova.virt.hardware [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1122.754434] env[62627]: DEBUG 
nova.virt.hardware [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1122.754685] env[62627]: DEBUG nova.virt.hardware [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1122.754846] env[62627]: DEBUG nova.virt.hardware [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1122.755023] env[62627]: DEBUG nova.virt.hardware [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1122.755190] env[62627]: DEBUG nova.virt.hardware [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1122.755361] env[62627]: DEBUG nova.virt.hardware [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1122.755639] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-3227421b-d2da-4f82-988b-120f5dddf6b7 tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] [instance: ae4ec050-5062-4e57-8090-f67f8c8013ff] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1122.755790] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-3227421b-d2da-4f82-988b-120f5dddf6b7 tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] [instance: ae4ec050-5062-4e57-8090-f67f8c8013ff] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1122.756485] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ea2f053-b019-4a21-88f0-6bb490e9997c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.758893] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d55e3eee-261a-4926-8a9b-34ceb0c9a1e4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.765190] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-851ad1c3-9a78-49fc-9eab-6634b8475a8f {{(pid=62627) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.816722] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-3227421b-d2da-4f82-988b-120f5dddf6b7 tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] [instance: ae4ec050-5062-4e57-8090-f67f8c8013ff] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1122.816995] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-3227421b-d2da-4f82-988b-120f5dddf6b7 tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] [instance: ae4ec050-5062-4e57-8090-f67f8c8013ff] Deleting contents of the VM from datastore datastore1 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1122.817199] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-3227421b-d2da-4f82-988b-120f5dddf6b7 tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Deleting the datastore file [datastore1] ae4ec050-5062-4e57-8090-f67f8c8013ff {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1122.817730] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a03d8525-42a3-43b7-b23c-c7db6a22ad55 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.825469] env[62627]: DEBUG oslo_vmware.api [None req-3227421b-d2da-4f82-988b-120f5dddf6b7 tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Waiting for the task: (returnval){ [ 1122.825469] env[62627]: value = "task-2194980" [ 1122.825469] env[62627]: _type = "Task" [ 1122.825469] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.833528] env[62627]: DEBUG oslo_vmware.api [None req-3227421b-d2da-4f82-988b-120f5dddf6b7 tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Task: {'id': task-2194980, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.918906] env[62627]: DEBUG oslo_vmware.api [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52b54ed0-a25a-6448-6d87-dc04843da3ba, 'name': SearchDatastore_Task, 'duration_secs': 0.054011} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.919295] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1122.919567] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1122.919828] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1122.920012] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1122.920217] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1122.920487] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-14333b8f-e950-430d-9553-1056b678a617 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.929954] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1122.930167] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1122.931312] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ae06346d-dd7a-4faf-a88e-b3422e643355 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.937108] env[62627]: DEBUG oslo_vmware.api [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1122.937108] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]528b1487-6acd-9b27-8c02-1c7214830f33" [ 1122.937108] env[62627]: _type = "Task" [ 1122.937108] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.944549] env[62627]: DEBUG oslo_vmware.api [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]528b1487-6acd-9b27-8c02-1c7214830f33, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.958462] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5c1fd9f-e8f2-46e0-a2b6-71563ba00411 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.979087] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c35f8a6-f069-41b7-bdca-c36a1b656a6a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.020782] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.311s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1123.021318] env[62627]: DEBUG nova.compute.manager [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Start building networks asynchronously for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1123.023897] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.942s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1123.025385] env[62627]: INFO nova.compute.claims [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1123.033077] env[62627]: DEBUG oslo_vmware.api [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2194976, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.526787} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.033361] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 9e0693d9-077a-4a02-b1c5-40fdddcdd27d/9e0693d9-077a-4a02-b1c5-40fdddcdd27d.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1123.033693] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1123.033967] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9bc3ac62-3ffe-4e70-9237-f2abbf8f3003 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.040353] env[62627]: DEBUG oslo_vmware.api [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1123.040353] env[62627]: value = "task-2194981" [ 1123.040353] env[62627]: _type = "Task" [ 1123.040353] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.048312] env[62627]: DEBUG oslo_vmware.api [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2194981, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.102603] env[62627]: DEBUG nova.compute.manager [req-d0049419-2515-4607-bd61-73e9a4b951c4 req-c33ee2d5-a779-4af5-9eb0-2bd6fbac2d28 service nova] [instance: b7fd9a2a-1960-4dba-93af-2dd5fe30a90d] Received event network-vif-plugged-772cf7cb-3d85-4d24-a40d-d294b5f5c582 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1123.102869] env[62627]: DEBUG oslo_concurrency.lockutils [req-d0049419-2515-4607-bd61-73e9a4b951c4 req-c33ee2d5-a779-4af5-9eb0-2bd6fbac2d28 service nova] Acquiring lock "b7fd9a2a-1960-4dba-93af-2dd5fe30a90d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1123.103124] env[62627]: DEBUG oslo_concurrency.lockutils [req-d0049419-2515-4607-bd61-73e9a4b951c4 req-c33ee2d5-a779-4af5-9eb0-2bd6fbac2d28 service nova] Lock "b7fd9a2a-1960-4dba-93af-2dd5fe30a90d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1123.103333] env[62627]: DEBUG oslo_concurrency.lockutils [req-d0049419-2515-4607-bd61-73e9a4b951c4 req-c33ee2d5-a779-4af5-9eb0-2bd6fbac2d28 service nova] Lock "b7fd9a2a-1960-4dba-93af-2dd5fe30a90d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1123.103539] env[62627]: DEBUG nova.compute.manager [req-d0049419-2515-4607-bd61-73e9a4b951c4 req-c33ee2d5-a779-4af5-9eb0-2bd6fbac2d28 service nova] [instance: b7fd9a2a-1960-4dba-93af-2dd5fe30a90d] No waiting events found dispatching network-vif-plugged-772cf7cb-3d85-4d24-a40d-d294b5f5c582 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1123.103736] env[62627]: WARNING nova.compute.manager [req-d0049419-2515-4607-bd61-73e9a4b951c4 req-c33ee2d5-a779-4af5-9eb0-2bd6fbac2d28 service nova] [instance: b7fd9a2a-1960-4dba-93af-2dd5fe30a90d] Received unexpected event network-vif-plugged-772cf7cb-3d85-4d24-a40d-d294b5f5c582 for instance with vm_state building and task_state spawning. [ 1123.209939] env[62627]: DEBUG nova.network.neutron [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] [instance: b7fd9a2a-1960-4dba-93af-2dd5fe30a90d] Successfully updated port: 772cf7cb-3d85-4d24-a40d-d294b5f5c582 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1123.230423] env[62627]: DEBUG oslo_vmware.api [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Task: {'id': task-2194978, 'name': Rename_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.335837] env[62627]: DEBUG oslo_vmware.api [None req-3227421b-d2da-4f82-988b-120f5dddf6b7 tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Task: {'id': task-2194980, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.164433} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.336107] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-3227421b-d2da-4f82-988b-120f5dddf6b7 tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1123.336358] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-3227421b-d2da-4f82-988b-120f5dddf6b7 tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] [instance: ae4ec050-5062-4e57-8090-f67f8c8013ff] Deleted contents of the VM from datastore datastore1 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1123.336473] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-3227421b-d2da-4f82-988b-120f5dddf6b7 tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] [instance: ae4ec050-5062-4e57-8090-f67f8c8013ff] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1123.336720] env[62627]: INFO nova.compute.manager [None req-3227421b-d2da-4f82-988b-120f5dddf6b7 tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] [instance: ae4ec050-5062-4e57-8090-f67f8c8013ff] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1123.336985] env[62627]: DEBUG oslo.service.loopingcall [None req-3227421b-d2da-4f82-988b-120f5dddf6b7 tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1123.337200] env[62627]: DEBUG nova.compute.manager [-] [instance: ae4ec050-5062-4e57-8090-f67f8c8013ff] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1123.337352] env[62627]: DEBUG nova.network.neutron [-] [instance: ae4ec050-5062-4e57-8090-f67f8c8013ff] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1123.448714] env[62627]: DEBUG oslo_vmware.api [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]528b1487-6acd-9b27-8c02-1c7214830f33, 'name': SearchDatastore_Task, 'duration_secs': 0.009281} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.449669] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4e5393ee-24be-4c9d-b641-aba858423779 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.455311] env[62627]: DEBUG oslo_vmware.api [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1123.455311] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52c67560-4c3e-063f-3bde-1f2569516f36" [ 1123.455311] env[62627]: _type = "Task" [ 1123.455311] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.463598] env[62627]: DEBUG oslo_vmware.api [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52c67560-4c3e-063f-3bde-1f2569516f36, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.490549] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-12ca1cb5-b141-4427-8ba3-db21cf725159 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Creating Snapshot of the VM instance {{(pid=62627) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1123.490865] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-1b93183a-9007-4eb9-8d77-41b8ca32465b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.497942] env[62627]: DEBUG oslo_vmware.api [None req-12ca1cb5-b141-4427-8ba3-db21cf725159 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1123.497942] env[62627]: value = "task-2194982" [ 1123.497942] env[62627]: _type = "Task" [ 1123.497942] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.506411] env[62627]: DEBUG oslo_vmware.api [None req-12ca1cb5-b141-4427-8ba3-db21cf725159 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2194982, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.530013] env[62627]: DEBUG nova.compute.utils [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1123.534588] env[62627]: DEBUG nova.compute.manager [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Allocating IP information in the background. 
{{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1123.534588] env[62627]: DEBUG nova.network.neutron [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1123.550811] env[62627]: DEBUG oslo_vmware.api [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2194981, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.304524} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.551188] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1123.551862] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7edb488-5306-43f2-a5e6-dfd8ff60a93d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.577771] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Reconfiguring VM instance instance-00000054 to attach disk [datastore2] 9e0693d9-077a-4a02-b1c5-40fdddcdd27d/9e0693d9-077a-4a02-b1c5-40fdddcdd27d.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1123.579659] env[62627]: DEBUG nova.policy [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9acb109fb0c046b2b2e6dfc595dd5c07', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '97d5055e1a674886a161a96b0e095451', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 1123.581168] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2fad4d05-11f3-4c18-8b23-1ffce33411f3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.601835] env[62627]: DEBUG oslo_vmware.api [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1123.601835] env[62627]: value = "task-2194983" [ 1123.601835] env[62627]: _type = "Task" [ 1123.601835] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.610799] env[62627]: DEBUG oslo_vmware.api [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2194983, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.716244] env[62627]: DEBUG oslo_concurrency.lockutils [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Acquiring lock "refresh_cache-b7fd9a2a-1960-4dba-93af-2dd5fe30a90d" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1123.716244] env[62627]: DEBUG oslo_concurrency.lockutils [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Acquired lock "refresh_cache-b7fd9a2a-1960-4dba-93af-2dd5fe30a90d" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1123.716244] env[62627]: DEBUG nova.network.neutron [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] [instance: b7fd9a2a-1960-4dba-93af-2dd5fe30a90d] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1123.731375] env[62627]: DEBUG oslo_vmware.api [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Task: {'id': task-2194978, 'name': Rename_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.967460] env[62627]: DEBUG oslo_vmware.api [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52c67560-4c3e-063f-3bde-1f2569516f36, 'name': SearchDatastore_Task, 'duration_secs': 0.048466} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.967746] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1123.968059] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1/f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1123.968343] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-843685d2-570d-45ba-b3c8-0108d354b0e9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.975718] env[62627]: DEBUG oslo_vmware.api [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1123.975718] env[62627]: value = "task-2194984" [ 1123.975718] env[62627]: _type = "Task" [ 1123.975718] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.989374] env[62627]: DEBUG oslo_vmware.api [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2194984, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.009084] env[62627]: DEBUG oslo_vmware.api [None req-12ca1cb5-b141-4427-8ba3-db21cf725159 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2194982, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.035375] env[62627]: DEBUG nova.compute.manager [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Start building block device mappings for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1124.060503] env[62627]: DEBUG nova.network.neutron [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Successfully created port: 823150e9-f3ee-44ab-a36b-c8abdca8b70e {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1124.114014] env[62627]: DEBUG oslo_vmware.api [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2194983, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.235495] env[62627]: DEBUG oslo_vmware.api [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Task: {'id': task-2194978, 'name': Rename_Task, 'duration_secs': 1.086423} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.235786] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] [instance: c0e4ea33-2397-4716-8f6a-603fe099fdc5] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1124.236057] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-66fbdaa3-f7ff-4b94-adcd-4e07050af557 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.243803] env[62627]: DEBUG oslo_vmware.api [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Waiting for the task: (returnval){ [ 1124.243803] env[62627]: value = "task-2194985" [ 1124.243803] env[62627]: _type = "Task" [ 1124.243803] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1124.255184] env[62627]: DEBUG oslo_vmware.api [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Task: {'id': task-2194985, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.275079] env[62627]: DEBUG nova.network.neutron [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] [instance: b7fd9a2a-1960-4dba-93af-2dd5fe30a90d] Instance cache missing network info. 
{{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1124.345486] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f3114ca-b157-4b23-b44d-605960832871 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.356124] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a16ba87-0b40-474f-80b0-4ec2c35569dd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.405373] env[62627]: DEBUG nova.network.neutron [-] [instance: ae4ec050-5062-4e57-8090-f67f8c8013ff] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1124.409094] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85ebf404-25fc-4b92-94dc-33fff6e2d693 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.423432] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-845290ed-d616-48bb-81cb-c38e34b80b7d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.442412] env[62627]: DEBUG nova.compute.provider_tree [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Updating inventory in ProviderTree for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1124.486927] env[62627]: DEBUG oslo_vmware.api [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2194984, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.504232] env[62627]: DEBUG nova.network.neutron [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] [instance: b7fd9a2a-1960-4dba-93af-2dd5fe30a90d] Updating instance_info_cache with network_info: [{"id": "772cf7cb-3d85-4d24-a40d-d294b5f5c582", "address": "fa:16:3e:00:8a:34", "network": {"id": "8a85d360-0b68-4b33-bc55-3570ed7a6337", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1997747330-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e00bc84b05714dfc801b13eb2aae734d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d1da5fc2-0280-4f76-ac97-20ea4bc7bb16", "external-id": "nsx-vlan-transportzone-563", "segmentation_id": 563, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap772cf7cb-3d", "ovs_interfaceid": "772cf7cb-3d85-4d24-a40d-d294b5f5c582", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1124.512326] env[62627]: DEBUG oslo_vmware.api [None req-12ca1cb5-b141-4427-8ba3-db21cf725159 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2194982, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.613328] env[62627]: DEBUG oslo_vmware.api [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2194983, 'name': ReconfigVM_Task, 'duration_secs': 0.733337} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.613614] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Reconfigured VM instance instance-00000054 to attach disk [datastore2] 9e0693d9-077a-4a02-b1c5-40fdddcdd27d/9e0693d9-077a-4a02-b1c5-40fdddcdd27d.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1124.614271] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dd81ca79-0807-4398-834c-0d68bc3715e3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.621692] env[62627]: DEBUG oslo_vmware.api [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1124.621692] env[62627]: value = "task-2194986" [ 1124.621692] env[62627]: _type = "Task" [ 1124.621692] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1124.632944] env[62627]: DEBUG oslo_vmware.api [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2194986, 'name': Rename_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.755112] env[62627]: DEBUG oslo_vmware.api [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Task: {'id': task-2194985, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.913279] env[62627]: INFO nova.compute.manager [-] [instance: ae4ec050-5062-4e57-8090-f67f8c8013ff] Took 1.58 seconds to deallocate network for instance. [ 1124.982292] env[62627]: ERROR nova.scheduler.client.report [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [req-23b6c84c-451e-47b5-ac50-fa1b18152cae] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c17e7f32-68d8-4e2f-8979-15d701de02f7. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-23b6c84c-451e-47b5-ac50-fa1b18152cae"}]} [ 1124.990139] env[62627]: DEBUG oslo_vmware.api [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2194984, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.656374} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.990405] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1/f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1124.990608] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1124.990869] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d36e9863-a7cd-48dc-b801-4cce5e3b8ee2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.999120] env[62627]: DEBUG oslo_vmware.api [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1124.999120] env[62627]: value = "task-2194987" [ 1124.999120] env[62627]: _type = "Task" [ 1124.999120] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.004671] env[62627]: DEBUG nova.scheduler.client.report [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Refreshing inventories for resource provider c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1125.007414] env[62627]: DEBUG oslo_concurrency.lockutils [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Releasing lock "refresh_cache-b7fd9a2a-1960-4dba-93af-2dd5fe30a90d" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1125.007798] env[62627]: DEBUG nova.compute.manager [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] [instance: b7fd9a2a-1960-4dba-93af-2dd5fe30a90d] Instance network_info: |[{"id": "772cf7cb-3d85-4d24-a40d-d294b5f5c582", "address": "fa:16:3e:00:8a:34", "network": {"id": "8a85d360-0b68-4b33-bc55-3570ed7a6337", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1997747330-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e00bc84b05714dfc801b13eb2aae734d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "d1da5fc2-0280-4f76-ac97-20ea4bc7bb16", "external-id": "nsx-vlan-transportzone-563", "segmentation_id": 563, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap772cf7cb-3d", "ovs_interfaceid": "772cf7cb-3d85-4d24-a40d-d294b5f5c582", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1125.011686] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] [instance: b7fd9a2a-1960-4dba-93af-2dd5fe30a90d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:00:8a:34', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd1da5fc2-0280-4f76-ac97-20ea4bc7bb16', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '772cf7cb-3d85-4d24-a40d-d294b5f5c582', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1125.018916] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Creating folder: Project (e00bc84b05714dfc801b13eb2aae734d). Parent ref: group-v447541. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1125.023882] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-df984925-74e9-4600-b713-d2fd59c0bbad {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.025534] env[62627]: DEBUG oslo_vmware.api [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2194987, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.026485] env[62627]: DEBUG nova.scheduler.client.report [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Updating ProviderTree inventory for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1125.026711] env[62627]: DEBUG nova.compute.provider_tree [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Updating inventory in ProviderTree for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1125.034756] env[62627]: DEBUG oslo_vmware.api [None req-12ca1cb5-b141-4427-8ba3-db21cf725159 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2194982, 'name': CreateSnapshot_Task, 'duration_secs': 1.34948} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.035037] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-12ca1cb5-b141-4427-8ba3-db21cf725159 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Created Snapshot of the VM instance {{(pid=62627) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1125.035794] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-305786e8-19dc-4c03-8fbe-a7a5ed37df61 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.039853] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Created folder: Project (e00bc84b05714dfc801b13eb2aae734d) in parent group-v447541. [ 1125.040135] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Creating folder: Instances. Parent ref: group-v447774. 
{{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1125.041176] env[62627]: DEBUG nova.scheduler.client.report [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Refreshing aggregate associations for resource provider c17e7f32-68d8-4e2f-8979-15d701de02f7, aggregates: None {{(pid=62627) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1125.043652] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0170e108-ddc7-4d00-b42a-e37dccc12ca4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.051612] env[62627]: DEBUG nova.compute.manager [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Start spawning the instance on the hypervisor. {{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1125.064821] env[62627]: DEBUG nova.scheduler.client.report [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Refreshing trait associations for resource provider c17e7f32-68d8-4e2f-8979-15d701de02f7, traits: COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62627) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1125.069125] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Created folder: Instances in parent group-v447774. [ 1125.069515] env[62627]: DEBUG oslo.service.loopingcall [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1125.071395] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b7fd9a2a-1960-4dba-93af-2dd5fe30a90d] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1125.071846] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-239d9ea2-60bc-4c43-82fc-49cde5e0286c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.091058] env[62627]: DEBUG nova.virt.hardware [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1125.091332] env[62627]: DEBUG nova.virt.hardware [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1125.091486] env[62627]: DEBUG nova.virt.hardware [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1125.091667] env[62627]: DEBUG nova.virt.hardware [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1125.091815] env[62627]: DEBUG nova.virt.hardware [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1125.091964] env[62627]: DEBUG nova.virt.hardware [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1125.092192] env[62627]: DEBUG nova.virt.hardware [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1125.092350] env[62627]: DEBUG nova.virt.hardware [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1125.092515] env[62627]: DEBUG nova.virt.hardware [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1125.092673] env[62627]: DEBUG nova.virt.hardware [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1125.092845] env[62627]: DEBUG nova.virt.hardware [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1125.094317] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae6d6e94-daed-4da9-bbc8-e3a2da9f31b3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.104214] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c170c00-9303-4f61-96f8-f766a686f958 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.108578] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1125.108578] env[62627]: value = "task-2194990" [ 1125.108578] env[62627]: _type = "Task" [ 1125.108578] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.130987] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194990, 'name': CreateVM_Task} progress is 10%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.135937] env[62627]: DEBUG oslo_vmware.api [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2194986, 'name': Rename_Task} progress is 14%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.138902] env[62627]: DEBUG nova.compute.manager [req-c12dbb86-1c37-497d-ba9e-a57bbb991177 req-bfcbef31-d2c9-4aca-aa1e-f22bb4fd83bb service nova] [instance: b7fd9a2a-1960-4dba-93af-2dd5fe30a90d] Received event network-changed-772cf7cb-3d85-4d24-a40d-d294b5f5c582 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1125.139154] env[62627]: DEBUG nova.compute.manager [req-c12dbb86-1c37-497d-ba9e-a57bbb991177 req-bfcbef31-d2c9-4aca-aa1e-f22bb4fd83bb service nova] [instance: b7fd9a2a-1960-4dba-93af-2dd5fe30a90d] Refreshing instance network info cache due to event network-changed-772cf7cb-3d85-4d24-a40d-d294b5f5c582. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1125.139314] env[62627]: DEBUG oslo_concurrency.lockutils [req-c12dbb86-1c37-497d-ba9e-a57bbb991177 req-bfcbef31-d2c9-4aca-aa1e-f22bb4fd83bb service nova] Acquiring lock "refresh_cache-b7fd9a2a-1960-4dba-93af-2dd5fe30a90d" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1125.139475] env[62627]: DEBUG oslo_concurrency.lockutils [req-c12dbb86-1c37-497d-ba9e-a57bbb991177 req-bfcbef31-d2c9-4aca-aa1e-f22bb4fd83bb service nova] Acquired lock "refresh_cache-b7fd9a2a-1960-4dba-93af-2dd5fe30a90d" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1125.139695] env[62627]: DEBUG nova.network.neutron [req-c12dbb86-1c37-497d-ba9e-a57bbb991177 req-bfcbef31-d2c9-4aca-aa1e-f22bb4fd83bb service nova] [instance: b7fd9a2a-1960-4dba-93af-2dd5fe30a90d] Refreshing network info cache for port 772cf7cb-3d85-4d24-a40d-d294b5f5c582 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1125.254886] env[62627]: DEBUG oslo_vmware.api [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Task: {'id': task-2194985, 'name': PowerOnVM_Task, 'duration_secs': 0.957035} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.257427] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] [instance: c0e4ea33-2397-4716-8f6a-603fe099fdc5] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1125.257647] env[62627]: INFO nova.compute.manager [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] [instance: c0e4ea33-2397-4716-8f6a-603fe099fdc5] Took 9.91 seconds to spawn the instance on the hypervisor. 
[ 1125.257841] env[62627]: DEBUG nova.compute.manager [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] [instance: c0e4ea33-2397-4716-8f6a-603fe099fdc5] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1125.258856] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45312ea4-10d6-4ceb-b2d3-28a70fc92f37 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.303031] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54608e49-9886-4615-93ea-016a374616a3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.310756] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ce256a5-63bc-4070-b0df-3aedf44ae19a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.347159] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8066d14b-5730-4828-a111-c1a990eef4b2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.355481] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae9c0d4b-8150-491b-9db9-9f0d180a09ac {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.376067] env[62627]: DEBUG nova.compute.provider_tree [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Updating inventory in ProviderTree for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1125.421364] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3227421b-d2da-4f82-988b-120f5dddf6b7 tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1125.506664] env[62627]: DEBUG nova.compute.manager [req-52840c0a-b179-4767-89dc-b283fdc5978a req-88d05e8e-439c-466f-8040-024fc06e879b service nova] [instance: b282093d-8013-436d-94d2-bc26161456ea] Received event network-vif-plugged-823150e9-f3ee-44ab-a36b-c8abdca8b70e {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1125.506903] env[62627]: DEBUG oslo_concurrency.lockutils [req-52840c0a-b179-4767-89dc-b283fdc5978a req-88d05e8e-439c-466f-8040-024fc06e879b service nova] Acquiring lock "b282093d-8013-436d-94d2-bc26161456ea-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1125.507127] env[62627]: DEBUG oslo_concurrency.lockutils [req-52840c0a-b179-4767-89dc-b283fdc5978a req-88d05e8e-439c-466f-8040-024fc06e879b service nova] Lock "b282093d-8013-436d-94d2-bc26161456ea-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1125.507297] env[62627]: DEBUG oslo_concurrency.lockutils [req-52840c0a-b179-4767-89dc-b283fdc5978a req-88d05e8e-439c-466f-8040-024fc06e879b service nova] Lock "b282093d-8013-436d-94d2-bc26161456ea-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1125.507462] env[62627]: DEBUG nova.compute.manager [req-52840c0a-b179-4767-89dc-b283fdc5978a req-88d05e8e-439c-466f-8040-024fc06e879b service nova] [instance: b282093d-8013-436d-94d2-bc26161456ea] No waiting events found dispatching network-vif-plugged-823150e9-f3ee-44ab-a36b-c8abdca8b70e {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1125.507626] env[62627]: WARNING nova.compute.manager [req-52840c0a-b179-4767-89dc-b283fdc5978a req-88d05e8e-439c-466f-8040-024fc06e879b service nova] [instance: b282093d-8013-436d-94d2-bc26161456ea] Received unexpected event network-vif-plugged-823150e9-f3ee-44ab-a36b-c8abdca8b70e for instance with vm_state building and task_state spawning. [ 1125.511269] env[62627]: DEBUG oslo_vmware.api [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2194987, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064135} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.511522] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1125.512309] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fdab196-b876-479e-842a-c76f973136e4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.534829] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Reconfiguring VM instance instance-00000056 to attach disk [datastore2] f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1/f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1125.535465] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6c8d3802-2e38-4c05-b0ae-fb9842df1c77 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.555134] env[62627]: DEBUG oslo_vmware.api [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1125.555134] env[62627]: value = "task-2194991" [ 1125.555134] env[62627]: _type = "Task" [ 1125.555134] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.564339] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-12ca1cb5-b141-4427-8ba3-db21cf725159 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Creating linked-clone VM from snapshot {{(pid=62627) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1125.564586] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-120d17d9-e115-4df0-882b-12ecf7d57516 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.572476] env[62627]: DEBUG oslo_vmware.api [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2194991, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.574202] env[62627]: DEBUG oslo_vmware.api [None req-12ca1cb5-b141-4427-8ba3-db21cf725159 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1125.574202] env[62627]: value = "task-2194992" [ 1125.574202] env[62627]: _type = "Task" [ 1125.574202] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.584016] env[62627]: DEBUG oslo_vmware.api [None req-12ca1cb5-b141-4427-8ba3-db21cf725159 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2194992, 'name': CloneVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.623056] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2194990, 'name': CreateVM_Task, 'duration_secs': 0.35539} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.626264] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b7fd9a2a-1960-4dba-93af-2dd5fe30a90d] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1125.627262] env[62627]: DEBUG oslo_concurrency.lockutils [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1125.627454] env[62627]: DEBUG oslo_concurrency.lockutils [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1125.627775] env[62627]: DEBUG oslo_concurrency.lockutils [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1125.628458] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b2203bce-f808-44ee-83ea-79ff0b3bfd69 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.633143] env[62627]: DEBUG oslo_vmware.api [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2194986, 'name': Rename_Task, 'duration_secs': 1.00124} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.633751] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1125.634042] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2fa72a77-1825-4ecf-af5e-7bbcbe76e74b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.637494] env[62627]: DEBUG oslo_vmware.api [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Waiting for the task: (returnval){ [ 1125.637494] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52ff3639-42e2-ed68-16e7-85ce949972c6" [ 1125.637494] env[62627]: _type = "Task" [ 1125.637494] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.642529] env[62627]: DEBUG oslo_vmware.api [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1125.642529] env[62627]: value = "task-2194993" [ 1125.642529] env[62627]: _type = "Task" [ 1125.642529] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.653214] env[62627]: DEBUG oslo_vmware.api [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52ff3639-42e2-ed68-16e7-85ce949972c6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.654021] env[62627]: DEBUG nova.network.neutron [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Successfully updated port: 823150e9-f3ee-44ab-a36b-c8abdca8b70e {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1125.662966] env[62627]: DEBUG oslo_vmware.api [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2194993, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.778525] env[62627]: INFO nova.compute.manager [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] [instance: c0e4ea33-2397-4716-8f6a-603fe099fdc5] Took 21.35 seconds to build instance. [ 1125.907560] env[62627]: DEBUG nova.network.neutron [req-c12dbb86-1c37-497d-ba9e-a57bbb991177 req-bfcbef31-d2c9-4aca-aa1e-f22bb4fd83bb service nova] [instance: b7fd9a2a-1960-4dba-93af-2dd5fe30a90d] Updated VIF entry in instance network info cache for port 772cf7cb-3d85-4d24-a40d-d294b5f5c582. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1125.907965] env[62627]: DEBUG nova.network.neutron [req-c12dbb86-1c37-497d-ba9e-a57bbb991177 req-bfcbef31-d2c9-4aca-aa1e-f22bb4fd83bb service nova] [instance: b7fd9a2a-1960-4dba-93af-2dd5fe30a90d] Updating instance_info_cache with network_info: [{"id": "772cf7cb-3d85-4d24-a40d-d294b5f5c582", "address": "fa:16:3e:00:8a:34", "network": {"id": "8a85d360-0b68-4b33-bc55-3570ed7a6337", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1997747330-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e00bc84b05714dfc801b13eb2aae734d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d1da5fc2-0280-4f76-ac97-20ea4bc7bb16", "external-id": "nsx-vlan-transportzone-563", "segmentation_id": 563, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap772cf7cb-3d", "ovs_interfaceid": "772cf7cb-3d85-4d24-a40d-d294b5f5c582", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1125.910476] env[62627]: DEBUG nova.scheduler.client.report [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Updated inventory for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with generation 110 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1125.910701] env[62627]: DEBUG nova.compute.provider_tree [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Updating resource provider c17e7f32-68d8-4e2f-8979-15d701de02f7 generation from 110 to 111 during operation: update_inventory {{(pid=62627) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1125.910917] env[62627]: DEBUG nova.compute.provider_tree [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Updating inventory in ProviderTree for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1126.064999] env[62627]: DEBUG oslo_vmware.api [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2194991, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.083615] env[62627]: DEBUG oslo_vmware.api [None req-12ca1cb5-b141-4427-8ba3-db21cf725159 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2194992, 'name': CloneVM_Task} progress is 94%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.149827] env[62627]: DEBUG oslo_vmware.api [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52ff3639-42e2-ed68-16e7-85ce949972c6, 'name': SearchDatastore_Task, 'duration_secs': 0.013329} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.150524] env[62627]: DEBUG oslo_concurrency.lockutils [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1126.150801] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] [instance: b7fd9a2a-1960-4dba-93af-2dd5fe30a90d] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1126.151063] env[62627]: DEBUG oslo_concurrency.lockutils [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1126.151219] env[62627]: DEBUG oslo_concurrency.lockutils [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1126.151401] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1126.151650] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a6320396-3a43-4441-a64c-bb3181a3c94f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.156967] env[62627]: DEBUG 
oslo_concurrency.lockutils [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Acquiring lock "refresh_cache-b282093d-8013-436d-94d2-bc26161456ea" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1126.157112] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Acquired lock "refresh_cache-b282093d-8013-436d-94d2-bc26161456ea" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1126.157258] env[62627]: DEBUG nova.network.neutron [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1126.158481] env[62627]: DEBUG oslo_vmware.api [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2194993, 'name': PowerOnVM_Task} progress is 96%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.164535] env[62627]: DEBUG oslo_concurrency.lockutils [None req-240ee51e-d42f-41de-b2ff-c1d5dfad2b9b tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Acquiring lock "c0e4ea33-2397-4716-8f6a-603fe099fdc5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1126.166894] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1126.167135] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1126.167939] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-09eb6294-f873-47f7-af3e-9c719985b0b5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.173639] env[62627]: DEBUG oslo_vmware.api [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Waiting for the task: (returnval){ [ 1126.173639] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d1b798-68b9-69dc-d147-1f7376c2a34d" [ 1126.173639] env[62627]: _type = "Task" [ 1126.173639] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.182922] env[62627]: DEBUG oslo_vmware.api [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d1b798-68b9-69dc-d147-1f7376c2a34d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.283161] env[62627]: DEBUG oslo_concurrency.lockutils [None req-14fe6df3-5429-4639-9611-43586414b5e2 tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Lock "c0e4ea33-2397-4716-8f6a-603fe099fdc5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.867s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1126.283466] env[62627]: DEBUG oslo_concurrency.lockutils [None req-240ee51e-d42f-41de-b2ff-c1d5dfad2b9b tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Lock "c0e4ea33-2397-4716-8f6a-603fe099fdc5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.119s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1126.283691] env[62627]: DEBUG oslo_concurrency.lockutils [None req-240ee51e-d42f-41de-b2ff-c1d5dfad2b9b tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Acquiring lock "c0e4ea33-2397-4716-8f6a-603fe099fdc5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1126.283898] env[62627]: DEBUG oslo_concurrency.lockutils [None req-240ee51e-d42f-41de-b2ff-c1d5dfad2b9b tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Lock "c0e4ea33-2397-4716-8f6a-603fe099fdc5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1126.284077] env[62627]: DEBUG oslo_concurrency.lockutils [None req-240ee51e-d42f-41de-b2ff-c1d5dfad2b9b tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Lock "c0e4ea33-2397-4716-8f6a-603fe099fdc5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1126.286284] env[62627]: INFO nova.compute.manager [None req-240ee51e-d42f-41de-b2ff-c1d5dfad2b9b tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] [instance: c0e4ea33-2397-4716-8f6a-603fe099fdc5] Terminating instance [ 1126.415549] env[62627]: DEBUG oslo_concurrency.lockutils [req-c12dbb86-1c37-497d-ba9e-a57bbb991177 req-bfcbef31-d2c9-4aca-aa1e-f22bb4fd83bb service nova] Releasing lock "refresh_cache-b7fd9a2a-1960-4dba-93af-2dd5fe30a90d" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 
1126.415856] env[62627]: DEBUG nova.compute.manager [req-c12dbb86-1c37-497d-ba9e-a57bbb991177 req-bfcbef31-d2c9-4aca-aa1e-f22bb4fd83bb service nova] [instance: ae4ec050-5062-4e57-8090-f67f8c8013ff] Received event network-vif-deleted-39451f1f-3666-45ef-8833-8b223f0961e5 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1126.416622] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.393s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1126.417334] env[62627]: DEBUG nova.compute.manager [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1126.419996] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.360s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1126.421202] env[62627]: INFO nova.compute.claims [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1126.565969] env[62627]: DEBUG oslo_vmware.api [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2194991, 'name': ReconfigVM_Task, 'duration_secs': 0.811223} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.566275] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Reconfigured VM instance instance-00000056 to attach disk [datastore2] f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1/f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1126.566951] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-49528567-2cae-473f-a15a-886b17e344ba {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.572868] env[62627]: DEBUG oslo_vmware.api [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1126.572868] env[62627]: value = "task-2194994" [ 1126.572868] env[62627]: _type = "Task" [ 1126.572868] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.584672] env[62627]: DEBUG oslo_vmware.api [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2194994, 'name': Rename_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.587779] env[62627]: DEBUG oslo_vmware.api [None req-12ca1cb5-b141-4427-8ba3-db21cf725159 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2194992, 'name': CloneVM_Task} progress is 94%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.652460] env[62627]: DEBUG oslo_vmware.api [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2194993, 'name': PowerOnVM_Task, 'duration_secs': 0.557751} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.652729] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1126.652959] env[62627]: DEBUG nova.compute.manager [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1126.653732] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-043b5924-90b7-4a8b-b7c3-366faf09f759 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.683117] env[62627]: DEBUG oslo_vmware.api [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d1b798-68b9-69dc-d147-1f7376c2a34d, 'name': SearchDatastore_Task, 'duration_secs': 0.021797} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.683871] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-37e165b4-325a-4706-8673-9af8918204f4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.688781] env[62627]: DEBUG oslo_vmware.api [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Waiting for the task: (returnval){ [ 1126.688781] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]527cde1a-5bc6-f61c-3854-143fb059e8e6" [ 1126.688781] env[62627]: _type = "Task" [ 1126.688781] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.692218] env[62627]: DEBUG nova.network.neutron [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1126.699285] env[62627]: DEBUG oslo_vmware.api [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]527cde1a-5bc6-f61c-3854-143fb059e8e6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.789540] env[62627]: DEBUG nova.compute.manager [None req-240ee51e-d42f-41de-b2ff-c1d5dfad2b9b tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] [instance: c0e4ea33-2397-4716-8f6a-603fe099fdc5] Start destroying the instance on the hypervisor. {{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1126.789848] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-240ee51e-d42f-41de-b2ff-c1d5dfad2b9b tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] [instance: c0e4ea33-2397-4716-8f6a-603fe099fdc5] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1126.790699] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-551693a3-e672-4fba-881b-0fccddd52772 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.800567] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-240ee51e-d42f-41de-b2ff-c1d5dfad2b9b tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] [instance: c0e4ea33-2397-4716-8f6a-603fe099fdc5] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1126.800853] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7b7ed73b-af3b-4502-8434-8502ddf58c5f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.806107] env[62627]: DEBUG oslo_vmware.api [None req-240ee51e-d42f-41de-b2ff-c1d5dfad2b9b tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Waiting for the task: (returnval){ [ 1126.806107] env[62627]: value = "task-2194995" [ 1126.806107] env[62627]: _type = "Task" [ 1126.806107] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.813957] env[62627]: DEBUG oslo_vmware.api [None req-240ee51e-d42f-41de-b2ff-c1d5dfad2b9b tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Task: {'id': task-2194995, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.847361] env[62627]: DEBUG nova.network.neutron [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Updating instance_info_cache with network_info: [{"id": "823150e9-f3ee-44ab-a36b-c8abdca8b70e", "address": "fa:16:3e:85:08:41", "network": {"id": "280614ef-fbde-41d9-927f-7471cc50aa14", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-970093695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97d5055e1a674886a161a96b0e095451", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8233b50c-be21-441a-a357-a29138a95b8b", "external-id": "nsx-vlan-transportzone-496", "segmentation_id": 496, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap823150e9-f3", "ovs_interfaceid": "823150e9-f3ee-44ab-a36b-c8abdca8b70e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1126.925974] env[62627]: DEBUG nova.compute.utils [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1126.932142] env[62627]: DEBUG nova.compute.manager [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] Allocating IP information in the background. 
{{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1126.932142] env[62627]: DEBUG nova.network.neutron [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1126.994706] env[62627]: DEBUG nova.policy [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b8960d0a8ce744e8b0536074751e9d0d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f41b0c171180469e9092b8a8da697a78', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 1127.087323] env[62627]: DEBUG oslo_vmware.api [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2194994, 'name': Rename_Task, 'duration_secs': 0.251599} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.094022] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1127.094022] env[62627]: DEBUG oslo_vmware.api [None req-12ca1cb5-b141-4427-8ba3-db21cf725159 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2194992, 'name': CloneVM_Task, 'duration_secs': 1.355389} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.094022] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0f979d3a-0ac6-4478-9881-9a0c8430bcc6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.094022] env[62627]: INFO nova.virt.vmwareapi.vmops [None req-12ca1cb5-b141-4427-8ba3-db21cf725159 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Created linked-clone VM from snapshot [ 1127.095224] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e467f8a7-e6f9-4f89-bdc3-42f63f323b55 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.104833] env[62627]: DEBUG nova.virt.vmwareapi.images [None req-12ca1cb5-b141-4427-8ba3-db21cf725159 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Uploading image 092e7cd9-297b-4a2d-92b6-29f1a70c0b73 {{(pid=62627) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1127.110479] env[62627]: DEBUG oslo_vmware.api [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1127.110479] env[62627]: value = "task-2194996" [ 1127.110479] env[62627]: _type = "Task" [ 1127.110479] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.120909] env[62627]: DEBUG oslo_vmware.api [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2194996, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.139573] env[62627]: DEBUG oslo_vmware.rw_handles [None req-12ca1cb5-b141-4427-8ba3-db21cf725159 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1127.139573] env[62627]: value = "vm-447777" [ 1127.139573] env[62627]: _type = "VirtualMachine" [ 1127.139573] env[62627]: }. {{(pid=62627) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1127.140289] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-428fbc2c-6635-41ad-9d7b-46cb8d10362f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.147736] env[62627]: DEBUG oslo_vmware.rw_handles [None req-12ca1cb5-b141-4427-8ba3-db21cf725159 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lease: (returnval){ [ 1127.147736] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]525057b8-266b-403b-61c2-c88addd34404" [ 1127.147736] env[62627]: _type = "HttpNfcLease" [ 1127.147736] env[62627]: } obtained for exporting VM: (result){ [ 1127.147736] env[62627]: value = "vm-447777" [ 1127.147736] env[62627]: _type = "VirtualMachine" [ 1127.147736] env[62627]: }. 
{{(pid=62627) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1127.150155] env[62627]: DEBUG oslo_vmware.api [None req-12ca1cb5-b141-4427-8ba3-db21cf725159 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the lease: (returnval){ [ 1127.150155] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]525057b8-266b-403b-61c2-c88addd34404" [ 1127.150155] env[62627]: _type = "HttpNfcLease" [ 1127.150155] env[62627]: } to be ready. {{(pid=62627) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1127.155941] env[62627]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1127.155941] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]525057b8-266b-403b-61c2-c88addd34404" [ 1127.155941] env[62627]: _type = "HttpNfcLease" [ 1127.155941] env[62627]: } is initializing. {{(pid=62627) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1127.175149] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1127.199354] env[62627]: DEBUG oslo_vmware.api [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]527cde1a-5bc6-f61c-3854-143fb059e8e6, 'name': SearchDatastore_Task, 'duration_secs': 0.018749} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.199706] env[62627]: DEBUG oslo_concurrency.lockutils [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1127.200211] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] b7fd9a2a-1960-4dba-93af-2dd5fe30a90d/b7fd9a2a-1960-4dba-93af-2dd5fe30a90d.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1127.200385] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dd1c15a9-db74-4709-8129-b4db9b8eb424 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.209022] env[62627]: DEBUG oslo_vmware.api [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Waiting for the task: (returnval){ [ 1127.209022] env[62627]: value = "task-2194998" [ 1127.209022] env[62627]: _type = "Task" [ 1127.209022] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.218528] env[62627]: DEBUG oslo_vmware.api [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Task: {'id': task-2194998, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.317856] env[62627]: DEBUG oslo_vmware.api [None req-240ee51e-d42f-41de-b2ff-c1d5dfad2b9b tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Task: {'id': task-2194995, 'name': PowerOffVM_Task, 'duration_secs': 0.181325} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.318191] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-240ee51e-d42f-41de-b2ff-c1d5dfad2b9b tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] [instance: c0e4ea33-2397-4716-8f6a-603fe099fdc5] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1127.318414] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-240ee51e-d42f-41de-b2ff-c1d5dfad2b9b tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] [instance: c0e4ea33-2397-4716-8f6a-603fe099fdc5] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1127.318688] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b60d2b17-6077-4306-903e-e5068815ebf1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.350195] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Releasing lock "refresh_cache-b282093d-8013-436d-94d2-bc26161456ea" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1127.350520] env[62627]: DEBUG nova.compute.manager [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Instance network_info: |[{"id": "823150e9-f3ee-44ab-a36b-c8abdca8b70e", "address": "fa:16:3e:85:08:41", "network": {"id": "280614ef-fbde-41d9-927f-7471cc50aa14", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-970093695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97d5055e1a674886a161a96b0e095451", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8233b50c-be21-441a-a357-a29138a95b8b", "external-id": "nsx-vlan-transportzone-496", "segmentation_id": 496, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap823150e9-f3", "ovs_interfaceid": "823150e9-f3ee-44ab-a36b-c8abdca8b70e", 
"qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1127.351053] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:85:08:41', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8233b50c-be21-441a-a357-a29138a95b8b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '823150e9-f3ee-44ab-a36b-c8abdca8b70e', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1127.362032] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Creating folder: Project (97d5055e1a674886a161a96b0e095451). Parent ref: group-v447541. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1127.363338] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c47d658c-ef75-48ae-aa9e-a4160348b450 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.374922] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Created folder: Project (97d5055e1a674886a161a96b0e095451) in parent group-v447541. [ 1127.375269] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Creating folder: Instances. Parent ref: group-v447778. 
{{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1127.375568] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-66528199-3f44-478d-8e7b-fb08cb731fdd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.378542] env[62627]: DEBUG nova.network.neutron [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] Successfully created port: 9db88239-0a75-4952-8e09-6a6636c9ecb0 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1127.388256] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-240ee51e-d42f-41de-b2ff-c1d5dfad2b9b tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] [instance: c0e4ea33-2397-4716-8f6a-603fe099fdc5] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1127.388428] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-240ee51e-d42f-41de-b2ff-c1d5dfad2b9b tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] [instance: c0e4ea33-2397-4716-8f6a-603fe099fdc5] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1127.388615] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-240ee51e-d42f-41de-b2ff-c1d5dfad2b9b tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Deleting the datastore file [datastore2] c0e4ea33-2397-4716-8f6a-603fe099fdc5 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1127.389758] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3005edf8-31bf-413c-82ff-1f493ed0199b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.392203] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Created folder: Instances in parent group-v447778. [ 1127.392445] env[62627]: DEBUG oslo.service.loopingcall [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1127.392660] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b282093d-8013-436d-94d2-bc26161456ea] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1127.393260] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ac469205-ee38-403b-ae0d-b8d79e97052c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.410096] env[62627]: DEBUG oslo_vmware.api [None req-240ee51e-d42f-41de-b2ff-c1d5dfad2b9b tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Waiting for the task: (returnval){ [ 1127.410096] env[62627]: value = "task-2195002" [ 1127.410096] env[62627]: _type = "Task" [ 1127.410096] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.416984] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1127.416984] env[62627]: value = "task-2195003" [ 1127.416984] env[62627]: _type = "Task" [ 1127.416984] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.420416] env[62627]: DEBUG oslo_vmware.api [None req-240ee51e-d42f-41de-b2ff-c1d5dfad2b9b tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Task: {'id': task-2195002, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.434920] env[62627]: DEBUG nova.compute.manager [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1127.437788] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195003, 'name': CreateVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.535652] env[62627]: DEBUG nova.compute.manager [req-67785620-ee13-4f68-9ae5-6ba0e3a5f29c req-bb6b70ec-56c2-4a83-882a-0167a3e35794 service nova] [instance: b282093d-8013-436d-94d2-bc26161456ea] Received event network-changed-823150e9-f3ee-44ab-a36b-c8abdca8b70e {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1127.535857] env[62627]: DEBUG nova.compute.manager [req-67785620-ee13-4f68-9ae5-6ba0e3a5f29c req-bb6b70ec-56c2-4a83-882a-0167a3e35794 service nova] [instance: b282093d-8013-436d-94d2-bc26161456ea] Refreshing instance network info cache due to event network-changed-823150e9-f3ee-44ab-a36b-c8abdca8b70e. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1127.536083] env[62627]: DEBUG oslo_concurrency.lockutils [req-67785620-ee13-4f68-9ae5-6ba0e3a5f29c req-bb6b70ec-56c2-4a83-882a-0167a3e35794 service nova] Acquiring lock "refresh_cache-b282093d-8013-436d-94d2-bc26161456ea" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1127.536255] env[62627]: DEBUG oslo_concurrency.lockutils [req-67785620-ee13-4f68-9ae5-6ba0e3a5f29c req-bb6b70ec-56c2-4a83-882a-0167a3e35794 service nova] Acquired lock "refresh_cache-b282093d-8013-436d-94d2-bc26161456ea" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1127.536385] env[62627]: DEBUG nova.network.neutron [req-67785620-ee13-4f68-9ae5-6ba0e3a5f29c req-bb6b70ec-56c2-4a83-882a-0167a3e35794 service nova] [instance: b282093d-8013-436d-94d2-bc26161456ea] Refreshing network info cache for port 823150e9-f3ee-44ab-a36b-c8abdca8b70e {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1127.624039] env[62627]: DEBUG oslo_vmware.api [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2194996, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.659152] env[62627]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1127.659152] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]525057b8-266b-403b-61c2-c88addd34404" [ 1127.659152] env[62627]: _type = "HttpNfcLease" [ 1127.659152] env[62627]: } is ready. {{(pid=62627) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1127.659475] env[62627]: DEBUG oslo_vmware.rw_handles [None req-12ca1cb5-b141-4427-8ba3-db21cf725159 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1127.659475] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]525057b8-266b-403b-61c2-c88addd34404" [ 1127.659475] env[62627]: _type = "HttpNfcLease" [ 1127.659475] env[62627]: }. {{(pid=62627) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1127.660279] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38bb4f97-ce2c-4777-af27-5f69aced9057 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.670510] env[62627]: DEBUG oslo_vmware.rw_handles [None req-12ca1cb5-b141-4427-8ba3-db21cf725159 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e93d8c-7fc6-1e30-7c35-6eae360ed387/disk-0.vmdk from lease info. {{(pid=62627) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1127.670662] env[62627]: DEBUG oslo_vmware.rw_handles [None req-12ca1cb5-b141-4427-8ba3-db21cf725159 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e93d8c-7fc6-1e30-7c35-6eae360ed387/disk-0.vmdk for reading. 
{{(pid=62627) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1127.738427] env[62627]: DEBUG oslo_vmware.api [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Task: {'id': task-2194998, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.491766} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.738699] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] b7fd9a2a-1960-4dba-93af-2dd5fe30a90d/b7fd9a2a-1960-4dba-93af-2dd5fe30a90d.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1127.738927] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] [instance: b7fd9a2a-1960-4dba-93af-2dd5fe30a90d] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1127.739215] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7687e626-8781-4958-843a-1c619dbd5819 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.746114] env[62627]: DEBUG oslo_vmware.api [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Waiting for the task: (returnval){ [ 1127.746114] env[62627]: value = "task-2195004" [ 1127.746114] env[62627]: _type = "Task" [ 1127.746114] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.756090] env[62627]: DEBUG oslo_vmware.api [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Task: {'id': task-2195004, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.758746] env[62627]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-3a3362e1-9997-47c1-b757-7b3452dc5326 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.798210] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebdf75e1-06c5-4feb-bb92-5c6f580430dd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.806731] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9742d651-d7d3-4153-906a-53eedb532320 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.839089] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87a306de-971c-45cb-90e1-64a3f3f630a4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.847218] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59c8a5e8-4a8a-4358-9b06-ab8864f50c67 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.861196] env[62627]: DEBUG nova.compute.provider_tree [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1127.921334] env[62627]: DEBUG oslo_vmware.api [None req-240ee51e-d42f-41de-b2ff-c1d5dfad2b9b tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Task: {'id': task-2195002, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.401051} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.925017] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-240ee51e-d42f-41de-b2ff-c1d5dfad2b9b tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1127.925276] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-240ee51e-d42f-41de-b2ff-c1d5dfad2b9b tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] [instance: c0e4ea33-2397-4716-8f6a-603fe099fdc5] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1127.925468] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-240ee51e-d42f-41de-b2ff-c1d5dfad2b9b tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] [instance: c0e4ea33-2397-4716-8f6a-603fe099fdc5] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1127.925677] env[62627]: INFO nova.compute.manager [None req-240ee51e-d42f-41de-b2ff-c1d5dfad2b9b tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] [instance: c0e4ea33-2397-4716-8f6a-603fe099fdc5] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1127.926065] env[62627]: DEBUG oslo.service.loopingcall [None req-240ee51e-d42f-41de-b2ff-c1d5dfad2b9b tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1127.926361] env[62627]: DEBUG nova.compute.manager [-] [instance: c0e4ea33-2397-4716-8f6a-603fe099fdc5] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1127.926468] env[62627]: DEBUG nova.network.neutron [-] [instance: c0e4ea33-2397-4716-8f6a-603fe099fdc5] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1127.933944] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195003, 'name': CreateVM_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.127841] env[62627]: DEBUG oslo_vmware.api [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2194996, 'name': PowerOnVM_Task, 'duration_secs': 0.844614} completed successfully.
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1128.128304] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1128.128494] env[62627]: INFO nova.compute.manager [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Took 9.46 seconds to spawn the instance on the hypervisor. [ 1128.128737] env[62627]: DEBUG nova.compute.manager [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1128.129652] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bf52d86-95ba-455b-b792-c8bd82f3c0eb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.256387] env[62627]: DEBUG oslo_vmware.api [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Task: {'id': task-2195004, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067936} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1128.256931] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] [instance: b7fd9a2a-1960-4dba-93af-2dd5fe30a90d] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1128.258624] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d18ea72-53dd-4592-97a1-720dc949d8e2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.286231] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] [instance: b7fd9a2a-1960-4dba-93af-2dd5fe30a90d] Reconfiguring VM instance instance-00000057 to attach disk [datastore2] b7fd9a2a-1960-4dba-93af-2dd5fe30a90d/b7fd9a2a-1960-4dba-93af-2dd5fe30a90d.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1128.286752] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5cba5976-a160-451e-bd79-3a41c0db053b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.312790] env[62627]: DEBUG oslo_vmware.api [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Waiting for the task: (returnval){ [ 1128.312790] env[62627]: value = "task-2195005" [ 1128.312790] env[62627]: _type = "Task" [ 1128.312790] env[62627]: } to 
complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.323419] env[62627]: DEBUG oslo_vmware.api [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Task: {'id': task-2195005, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.367316] env[62627]: DEBUG nova.scheduler.client.report [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1128.433822] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195003, 'name': CreateVM_Task, 'duration_secs': 0.5793} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1128.433822] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b282093d-8013-436d-94d2-bc26161456ea] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1128.433822] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1128.433822] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1128.434323] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1128.434499] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-26bf18bc-6659-4591-a87b-5f2e0ea2af35 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.440077] env[62627]: DEBUG oslo_vmware.api [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Waiting for the task: (returnval){ [ 1128.440077] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52cb146f-0a41-feb7-4a0d-070b1539a183" [ 
1128.440077] env[62627]: _type = "Task" [ 1128.440077] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.441048] env[62627]: DEBUG nova.network.neutron [req-67785620-ee13-4f68-9ae5-6ba0e3a5f29c req-bb6b70ec-56c2-4a83-882a-0167a3e35794 service nova] [instance: b282093d-8013-436d-94d2-bc26161456ea] Updated VIF entry in instance network info cache for port 823150e9-f3ee-44ab-a36b-c8abdca8b70e. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1128.442303] env[62627]: DEBUG nova.network.neutron [req-67785620-ee13-4f68-9ae5-6ba0e3a5f29c req-bb6b70ec-56c2-4a83-882a-0167a3e35794 service nova] [instance: b282093d-8013-436d-94d2-bc26161456ea] Updating instance_info_cache with network_info: [{"id": "823150e9-f3ee-44ab-a36b-c8abdca8b70e", "address": "fa:16:3e:85:08:41", "network": {"id": "280614ef-fbde-41d9-927f-7471cc50aa14", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-970093695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97d5055e1a674886a161a96b0e095451", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8233b50c-be21-441a-a357-a29138a95b8b", "external-id": "nsx-vlan-transportzone-496", "segmentation_id": 496, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap823150e9-f3", "ovs_interfaceid": "823150e9-f3ee-44ab-a36b-c8abdca8b70e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1128.448836] env[62627]: DEBUG nova.compute.manager [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] Start spawning the instance on the hypervisor. {{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1128.456844] env[62627]: DEBUG oslo_vmware.api [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52cb146f-0a41-feb7-4a0d-070b1539a183, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.472361] env[62627]: DEBUG nova.virt.hardware [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=<?>,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-01-30T20:28:30Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1128.473026] env[62627]: DEBUG nova.virt.hardware [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1128.473723] env[62627]: DEBUG nova.virt.hardware [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1128.474085] env[62627]: DEBUG nova.virt.hardware [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1128.476023] env[62627]: DEBUG nova.virt.hardware [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1128.476023] env[62627]: DEBUG nova.virt.hardware [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1128.476023] env[62627]: DEBUG nova.virt.hardware [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1128.476023] env[62627]: DEBUG nova.virt.hardware [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
1128.476023] env[62627]: DEBUG nova.virt.hardware [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1128.476023] env[62627]: DEBUG nova.virt.hardware [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1128.476023] env[62627]: DEBUG nova.virt.hardware [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1128.477300] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97722577-fbb2-4d4e-9b12-d5abaeac9c7f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.485929] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d162fc4-74e3-4119-91c6-f3a74c71c3e8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.650463] env[62627]: INFO nova.compute.manager [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Took 23.42 seconds to build instance. 
[ 1128.659063] env[62627]: DEBUG oslo_concurrency.lockutils [None req-24305e20-8d8a-4184-b925-0809cfcd3e70 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquiring lock "9e0693d9-077a-4a02-b1c5-40fdddcdd27d" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1128.659346] env[62627]: DEBUG oslo_concurrency.lockutils [None req-24305e20-8d8a-4184-b925-0809cfcd3e70 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "9e0693d9-077a-4a02-b1c5-40fdddcdd27d" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1128.659532] env[62627]: DEBUG oslo_concurrency.lockutils [None req-24305e20-8d8a-4184-b925-0809cfcd3e70 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquiring lock "9e0693d9-077a-4a02-b1c5-40fdddcdd27d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1128.659712] env[62627]: DEBUG oslo_concurrency.lockutils [None req-24305e20-8d8a-4184-b925-0809cfcd3e70 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "9e0693d9-077a-4a02-b1c5-40fdddcdd27d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1128.659884] env[62627]: DEBUG oslo_concurrency.lockutils [None req-24305e20-8d8a-4184-b925-0809cfcd3e70 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "9e0693d9-077a-4a02-b1c5-40fdddcdd27d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1128.661864] env[62627]: INFO nova.compute.manager [None req-24305e20-8d8a-4184-b925-0809cfcd3e70 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Terminating instance [ 1128.829661] env[62627]: DEBUG oslo_vmware.api [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Task: {'id': task-2195005, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.852215] env[62627]: DEBUG nova.network.neutron [-] [instance: c0e4ea33-2397-4716-8f6a-603fe099fdc5] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1128.871730] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.451s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1128.871730] env[62627]: DEBUG nova.compute.manager [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1128.875247] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3227421b-d2da-4f82-988b-120f5dddf6b7 tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.454s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1128.875578] env[62627]: DEBUG nova.objects.instance [None req-3227421b-d2da-4f82-988b-120f5dddf6b7 tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Lazy-loading 'resources' on Instance uuid ae4ec050-5062-4e57-8090-f67f8c8013ff {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1128.948598] env[62627]: DEBUG oslo_concurrency.lockutils [req-67785620-ee13-4f68-9ae5-6ba0e3a5f29c req-bb6b70ec-56c2-4a83-882a-0167a3e35794 service nova] Releasing lock "refresh_cache-b282093d-8013-436d-94d2-bc26161456ea" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1128.955904] env[62627]: DEBUG oslo_vmware.api [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52cb146f-0a41-feb7-4a0d-070b1539a183, 'name': SearchDatastore_Task, 'duration_secs': 0.020315} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1128.956614] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1128.957461] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1128.957739] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1128.957896] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1128.958130] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1128.958432] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e2bf96b7-406f-4f2b-a744-27a6beafcca7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.967470] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1128.967726] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1128.968569] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d594f16c-b795-437e-b4f7-d9140439dc8f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.974460] env[62627]: DEBUG oslo_vmware.api [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Waiting for the task: (returnval){ [ 1128.974460] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5205c6b7-60e8-970e-702d-23bd7cbc4f4a" [ 1128.974460] env[62627]: _type = "Task" [ 1128.974460] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.985632] env[62627]: DEBUG oslo_vmware.api [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5205c6b7-60e8-970e-702d-23bd7cbc4f4a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.153176] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f6d62c26-136a-486f-aa78-0863907f5053 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 24.934s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1129.167246] env[62627]: DEBUG nova.compute.manager [None req-24305e20-8d8a-4184-b925-0809cfcd3e70 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Start destroying the instance on the hypervisor. 
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1129.167745] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-24305e20-8d8a-4184-b925-0809cfcd3e70 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1129.168741] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bec1325-9d95-4c0b-9a5f-e0dfc9e88699 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.178532] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-24305e20-8d8a-4184-b925-0809cfcd3e70 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1129.178532] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e4537683-25b7-4215-841a-5c48093db89f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.184949] env[62627]: DEBUG oslo_vmware.api [None req-24305e20-8d8a-4184-b925-0809cfcd3e70 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1129.184949] env[62627]: value = "task-2195006" [ 1129.184949] env[62627]: _type = "Task" [ 1129.184949] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.194516] env[62627]: DEBUG oslo_vmware.api [None req-24305e20-8d8a-4184-b925-0809cfcd3e70 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195006, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.255530] env[62627]: DEBUG nova.compute.manager [req-8554e720-2080-406f-b47d-1d30f841ed64 req-c922138c-a408-49e6-bb27-c0051dbbfbaf service nova] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Received event network-changed-291e9d98-ac72-43f7-b745-9f0d290580fa {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1129.255782] env[62627]: DEBUG nova.compute.manager [req-8554e720-2080-406f-b47d-1d30f841ed64 req-c922138c-a408-49e6-bb27-c0051dbbfbaf service nova] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Refreshing instance network info cache due to event network-changed-291e9d98-ac72-43f7-b745-9f0d290580fa. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1129.256383] env[62627]: DEBUG oslo_concurrency.lockutils [req-8554e720-2080-406f-b47d-1d30f841ed64 req-c922138c-a408-49e6-bb27-c0051dbbfbaf service nova] Acquiring lock "refresh_cache-f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1129.259178] env[62627]: DEBUG oslo_concurrency.lockutils [req-8554e720-2080-406f-b47d-1d30f841ed64 req-c922138c-a408-49e6-bb27-c0051dbbfbaf service nova] Acquired lock "refresh_cache-f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1129.259178] env[62627]: DEBUG nova.network.neutron [req-8554e720-2080-406f-b47d-1d30f841ed64 req-c922138c-a408-49e6-bb27-c0051dbbfbaf service nova] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Refreshing network info cache for port 291e9d98-ac72-43f7-b745-9f0d290580fa {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1129.326338] env[62627]: DEBUG oslo_vmware.api [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Task: {'id': task-2195005, 'name': ReconfigVM_Task, 'duration_secs': 0.591275} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.327461] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] [instance: b7fd9a2a-1960-4dba-93af-2dd5fe30a90d] Reconfigured VM instance instance-00000057 to attach disk [datastore2] b7fd9a2a-1960-4dba-93af-2dd5fe30a90d/b7fd9a2a-1960-4dba-93af-2dd5fe30a90d.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1129.327698] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ac14c02d-c1d1-409e-afb8-06b74b34e86e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.334430] env[62627]: DEBUG oslo_vmware.api [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Waiting for the task: (returnval){ [ 1129.334430] env[62627]: value = "task-2195007" [ 1129.334430] env[62627]: _type = "Task" [ 1129.334430] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.343771] env[62627]: DEBUG oslo_vmware.api [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Task: {'id': task-2195007, 'name': Rename_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.347942] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquiring lock "634d547b-3b5e-4185-8229-daf055ca6e69" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1129.348198] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "634d547b-3b5e-4185-8229-daf055ca6e69" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1129.355335] env[62627]: INFO nova.compute.manager [-] [instance: c0e4ea33-2397-4716-8f6a-603fe099fdc5] Took 1.43 seconds to deallocate network for instance. [ 1129.377161] env[62627]: DEBUG nova.compute.utils [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1129.380039] env[62627]: DEBUG nova.compute.manager [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Allocating IP information in the background. 
{{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1129.380039] env[62627]: DEBUG nova.network.neutron [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1129.390704] env[62627]: DEBUG nova.network.neutron [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] Successfully updated port: 9db88239-0a75-4952-8e09-6a6636c9ecb0 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1129.431576] env[62627]: DEBUG nova.policy [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1253a87d708448aba8d97fcabe137853', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6c9263b20f714274a55b88a8bdbd5251', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 1129.490302] env[62627]: DEBUG oslo_vmware.api [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5205c6b7-60e8-970e-702d-23bd7cbc4f4a, 'name': SearchDatastore_Task, 'duration_secs': 0.011138} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.491381] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e5955537-66d8-4962-9aa1-e3d97e14971d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.496977] env[62627]: DEBUG oslo_vmware.api [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Waiting for the task: (returnval){ [ 1129.496977] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5256211a-4298-ed84-cdf0-f7ccf54926d1" [ 1129.496977] env[62627]: _type = "Task" [ 1129.496977] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.508449] env[62627]: DEBUG oslo_vmware.api [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5256211a-4298-ed84-cdf0-f7ccf54926d1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.565549] env[62627]: DEBUG nova.compute.manager [req-8a32701b-4fd7-4ab2-8198-74abbb865d96 req-556e0bc4-916f-480a-8430-de9d048f1031 service nova] [instance: c0e4ea33-2397-4716-8f6a-603fe099fdc5] Received event network-vif-deleted-e953f345-dc88-46fa-bee5-036a4f2d9b4c {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1129.665189] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-300020f0-5b5d-4a04-86e1-6de313a000d4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.673503] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb61d8ce-a9cb-4eca-b819-a33df5081895 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.711817] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-464b9ff2-799f-46f0-b9ab-ad78d92f3c70 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.721095] env[62627]: DEBUG oslo_vmware.api [None req-24305e20-8d8a-4184-b925-0809cfcd3e70 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195006, 'name': PowerOffVM_Task, 'duration_secs': 0.199727} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.726845] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-24305e20-8d8a-4184-b925-0809cfcd3e70 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1129.726845] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-24305e20-8d8a-4184-b925-0809cfcd3e70 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1129.726845] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3d04e916-0373-49f9-9aab-88e7ad84378c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.727922] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1134c4a-7cf3-4cd8-ba1e-d80aec3ed397 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.746395] env[62627]: DEBUG nova.compute.provider_tree [None req-3227421b-d2da-4f82-988b-120f5dddf6b7 tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1129.789646] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-24305e20-8d8a-4184-b925-0809cfcd3e70 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] 
[instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1129.789978] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-24305e20-8d8a-4184-b925-0809cfcd3e70 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1129.790068] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-24305e20-8d8a-4184-b925-0809cfcd3e70 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Deleting the datastore file [datastore2] 9e0693d9-077a-4a02-b1c5-40fdddcdd27d {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1129.791690] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b6cdfe8d-0d2d-4566-828f-e05cb2317542 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.801018] env[62627]: DEBUG oslo_vmware.api [None req-24305e20-8d8a-4184-b925-0809cfcd3e70 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1129.801018] env[62627]: value = "task-2195009" [ 1129.801018] env[62627]: _type = "Task" [ 1129.801018] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.806326] env[62627]: DEBUG nova.network.neutron [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Successfully created port: 9297477b-3156-43b7-b7ca-4c2d76609f24 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1129.811470] env[62627]: DEBUG oslo_vmware.api [None req-24305e20-8d8a-4184-b925-0809cfcd3e70 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195009, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.844293] env[62627]: DEBUG oslo_vmware.api [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Task: {'id': task-2195007, 'name': Rename_Task, 'duration_secs': 0.260921} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.844715] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] [instance: b7fd9a2a-1960-4dba-93af-2dd5fe30a90d] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1129.844833] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e7be29c5-29cc-44b1-b351-35cc3f71b4ae {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.851172] env[62627]: DEBUG nova.compute.manager [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1129.856019] env[62627]: DEBUG oslo_vmware.api [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Waiting for the task: (returnval){ [ 1129.856019] env[62627]: value = "task-2195010" [ 1129.856019] env[62627]: _type = "Task" [ 1129.856019] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.862397] env[62627]: DEBUG oslo_vmware.api [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Task: {'id': task-2195010, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.863500] env[62627]: DEBUG oslo_concurrency.lockutils [None req-240ee51e-d42f-41de-b2ff-c1d5dfad2b9b tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1129.884374] env[62627]: DEBUG nova.compute.manager [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Start building block device mappings for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1129.893761] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Acquiring lock "refresh_cache-3001147e-7be3-4f72-b330-8f99bf7cf3ea" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1129.893969] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Acquired lock "refresh_cache-3001147e-7be3-4f72-b330-8f99bf7cf3ea" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1129.894168] env[62627]: DEBUG nova.network.neutron [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1130.009732] env[62627]: DEBUG oslo_vmware.api [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5256211a-4298-ed84-cdf0-f7ccf54926d1, 'name': SearchDatastore_Task, 'duration_secs': 0.015817} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.014025] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1130.014359] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore1] b282093d-8013-436d-94d2-bc26161456ea/b282093d-8013-436d-94d2-bc26161456ea.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1130.014707] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6e36ba4b-8c67-4b75-97a0-0b260a6b61e9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.022384] env[62627]: DEBUG oslo_vmware.api [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Waiting for the task: (returnval){ [ 1130.022384] env[62627]: value = "task-2195011" [ 1130.022384] env[62627]: _type = "Task" [ 1130.022384] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1130.034571] env[62627]: DEBUG oslo_vmware.api [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195011, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.104783] env[62627]: DEBUG nova.network.neutron [req-8554e720-2080-406f-b47d-1d30f841ed64 req-c922138c-a408-49e6-bb27-c0051dbbfbaf service nova] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Updated VIF entry in instance network info cache for port 291e9d98-ac72-43f7-b745-9f0d290580fa. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1130.105857] env[62627]: DEBUG nova.network.neutron [req-8554e720-2080-406f-b47d-1d30f841ed64 req-c922138c-a408-49e6-bb27-c0051dbbfbaf service nova] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Updating instance_info_cache with network_info: [{"id": "291e9d98-ac72-43f7-b745-9f0d290580fa", "address": "fa:16:3e:c2:63:41", "network": {"id": "1e37275e-31eb-4a79-8ff1-1b105d16fe3d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-59116622-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.154", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7a97f57bdf4a46db80460b1d5c9f9b9f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f499bc9-78da-46c1-9274-19edf26d31cb", "external-id": "nsx-vlan-transportzone-243", "segmentation_id": 243, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap291e9d98-ac", "ovs_interfaceid": "291e9d98-ac72-43f7-b745-9f0d290580fa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1130.249028] env[62627]: DEBUG nova.scheduler.client.report [None req-3227421b-d2da-4f82-988b-120f5dddf6b7 tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1130.313449] env[62627]: DEBUG oslo_vmware.api [None req-24305e20-8d8a-4184-b925-0809cfcd3e70 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195009, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.23412} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.313748] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-24305e20-8d8a-4184-b925-0809cfcd3e70 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1130.313987] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-24305e20-8d8a-4184-b925-0809cfcd3e70 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1130.314205] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-24305e20-8d8a-4184-b925-0809cfcd3e70 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1130.314423] env[62627]: INFO nova.compute.manager [None req-24305e20-8d8a-4184-b925-0809cfcd3e70 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1130.314772] env[62627]: DEBUG oslo.service.loopingcall [None req-24305e20-8d8a-4184-b925-0809cfcd3e70 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1130.315031] env[62627]: DEBUG nova.compute.manager [-] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1130.315142] env[62627]: DEBUG nova.network.neutron [-] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1130.378734] env[62627]: DEBUG oslo_vmware.api [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Task: {'id': task-2195010, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.388068] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1130.456277] env[62627]: DEBUG nova.network.neutron [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] Instance cache missing network info. 
{{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1130.532752] env[62627]: DEBUG oslo_vmware.api [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195011, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.611567] env[62627]: DEBUG oslo_concurrency.lockutils [req-8554e720-2080-406f-b47d-1d30f841ed64 req-c922138c-a408-49e6-bb27-c0051dbbfbaf service nova] Releasing lock "refresh_cache-f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1130.611997] env[62627]: DEBUG nova.compute.manager [req-8554e720-2080-406f-b47d-1d30f841ed64 req-c922138c-a408-49e6-bb27-c0051dbbfbaf service nova] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] Received event network-vif-plugged-9db88239-0a75-4952-8e09-6a6636c9ecb0 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1130.612395] env[62627]: DEBUG oslo_concurrency.lockutils [req-8554e720-2080-406f-b47d-1d30f841ed64 req-c922138c-a408-49e6-bb27-c0051dbbfbaf service nova] Acquiring lock "3001147e-7be3-4f72-b330-8f99bf7cf3ea-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1130.612803] env[62627]: DEBUG oslo_concurrency.lockutils [req-8554e720-2080-406f-b47d-1d30f841ed64 req-c922138c-a408-49e6-bb27-c0051dbbfbaf service nova] Lock "3001147e-7be3-4f72-b330-8f99bf7cf3ea-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1130.612803] env[62627]: DEBUG oslo_concurrency.lockutils [req-8554e720-2080-406f-b47d-1d30f841ed64 req-c922138c-a408-49e6-bb27-c0051dbbfbaf service nova] Lock "3001147e-7be3-4f72-b330-8f99bf7cf3ea-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1130.612803] env[62627]: DEBUG nova.compute.manager [req-8554e720-2080-406f-b47d-1d30f841ed64 req-c922138c-a408-49e6-bb27-c0051dbbfbaf service nova] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] No waiting events found dispatching network-vif-plugged-9db88239-0a75-4952-8e09-6a6636c9ecb0 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1130.613225] env[62627]: WARNING nova.compute.manager [req-8554e720-2080-406f-b47d-1d30f841ed64 req-c922138c-a408-49e6-bb27-c0051dbbfbaf service nova] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] Received unexpected event network-vif-plugged-9db88239-0a75-4952-8e09-6a6636c9ecb0 for instance with vm_state building and task_state spawning. 
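Note on the lock records around this point: the "Acquiring lock ... by ..." / "Lock ... acquired by ... :: waited" / "Lock ... "released" by ... :: held" triples come from the named-lock wrapper in oslo_concurrency.lockutils (the `inner` frames at lockutils.py:402/407/421). A minimal, hedged sketch of that pattern in isolation follows; it is illustrative only, not Nova's actual code, and the lock name and function below are made up.

# Illustrative sketch (not Nova code): an oslo.concurrency named lock emits the
# "Acquiring/acquired ... waited" and "released ... held" DEBUG lines seen in this log.
from oslo_concurrency import lockutils

@lockutils.synchronized('refresh_cache-example-instance-uuid')  # hypothetical lock name
def refresh_cache():
    # Body runs with the named internal semaphore held; lockutils logs how long
    # the caller waited for the lock and how long it was held.
    pass

refresh_cache()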
[ 1130.613555] env[62627]: DEBUG oslo_concurrency.lockutils [None req-143b7d1b-e012-407c-9251-0277960d4789 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquiring lock "interface-84b8381c-bdd4-462e-aa98-0cc972f00882-None" by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1130.613790] env[62627]: DEBUG oslo_concurrency.lockutils [None req-143b7d1b-e012-407c-9251-0277960d4789 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lock "interface-84b8381c-bdd4-462e-aa98-0cc972f00882-None" acquired by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1130.614159] env[62627]: DEBUG nova.objects.instance [None req-143b7d1b-e012-407c-9251-0277960d4789 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lazy-loading 'flavor' on Instance uuid 84b8381c-bdd4-462e-aa98-0cc972f00882 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1130.706263] env[62627]: DEBUG nova.network.neutron [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] Updating instance_info_cache with network_info: [{"id": "9db88239-0a75-4952-8e09-6a6636c9ecb0", "address": "fa:16:3e:70:de:4a", "network": {"id": "76eae841-5dda-4384-92ba-55247808e11f", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1004379592-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f41b0c171180469e9092b8a8da697a78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9db88239-0a", "ovs_interfaceid": "9db88239-0a75-4952-8e09-6a6636c9ecb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1130.758022] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3227421b-d2da-4f82-988b-120f5dddf6b7 tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.880s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1130.758022] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "compute_resources" 
acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 3.583s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1130.758022] env[62627]: DEBUG nova.objects.instance [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62627) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1130.776570] env[62627]: INFO nova.scheduler.client.report [None req-3227421b-d2da-4f82-988b-120f5dddf6b7 tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Deleted allocations for instance ae4ec050-5062-4e57-8090-f67f8c8013ff [ 1130.867623] env[62627]: DEBUG oslo_vmware.api [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Task: {'id': task-2195010, 'name': PowerOnVM_Task, 'duration_secs': 0.632422} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.868121] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] [instance: b7fd9a2a-1960-4dba-93af-2dd5fe30a90d] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1130.868179] env[62627]: INFO nova.compute.manager [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] [instance: b7fd9a2a-1960-4dba-93af-2dd5fe30a90d] Took 8.14 seconds to spawn the instance on the hypervisor. [ 1130.868366] env[62627]: DEBUG nova.compute.manager [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] [instance: b7fd9a2a-1960-4dba-93af-2dd5fe30a90d] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1130.869180] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6af3d21-1be8-4e70-ad2b-8da72a20245b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.895459] env[62627]: DEBUG nova.compute.manager [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Start spawning the instance on the hypervisor. 
{{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1130.919935] env[62627]: DEBUG nova.virt.hardware [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=<?>,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-01-30T20:28:30Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1130.920202] env[62627]: DEBUG nova.virt.hardware [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1130.920362] env[62627]: DEBUG nova.virt.hardware [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1130.920539] env[62627]: DEBUG nova.virt.hardware [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1130.920682] env[62627]: DEBUG nova.virt.hardware [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1130.920825] env[62627]: DEBUG nova.virt.hardware [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1130.921047] env[62627]: DEBUG nova.virt.hardware [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1130.921212] env[62627]: DEBUG nova.virt.hardware [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1130.921421] env[62627]: DEBUG nova.virt.hardware [None 
req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1130.921606] env[62627]: DEBUG nova.virt.hardware [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1130.921790] env[62627]: DEBUG nova.virt.hardware [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1130.922887] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a80b90e-f8e4-4063-856c-9064518741f4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.930959] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56e7ae9c-8415-4496-8f15-50b2a4d2e5ab {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.035030] env[62627]: DEBUG oslo_vmware.api [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195011, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.602618} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1131.035030] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore1] b282093d-8013-436d-94d2-bc26161456ea/b282093d-8013-436d-94d2-bc26161456ea.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1131.035030] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1131.035287] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4587a12d-dc10-407c-930b-cc292e5fe46e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.043955] env[62627]: DEBUG oslo_vmware.api [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Waiting for the task: (returnval){ [ 1131.043955] env[62627]: value = "task-2195012" [ 1131.043955] env[62627]: _type = "Task" [ 1131.043955] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.053759] env[62627]: DEBUG oslo_vmware.api [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195012, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.084341] env[62627]: DEBUG nova.network.neutron [-] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1131.118641] env[62627]: DEBUG nova.objects.instance [None req-143b7d1b-e012-407c-9251-0277960d4789 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lazy-loading 'pci_requests' on Instance uuid 84b8381c-bdd4-462e-aa98-0cc972f00882 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1131.207947] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Releasing lock "refresh_cache-3001147e-7be3-4f72-b330-8f99bf7cf3ea" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1131.208366] env[62627]: DEBUG nova.compute.manager [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] Instance network_info: |[{"id": "9db88239-0a75-4952-8e09-6a6636c9ecb0", "address": "fa:16:3e:70:de:4a", "network": {"id": "76eae841-5dda-4384-92ba-55247808e11f", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1004379592-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f41b0c171180469e9092b8a8da697a78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9db88239-0a", "ovs_interfaceid": "9db88239-0a75-4952-8e09-6a6636c9ecb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1131.208800] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:70:de:4a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dc7aa55d-223a-4157-9137-88dc492f2db2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'9db88239-0a75-4952-8e09-6a6636c9ecb0', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1131.216583] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Creating folder: Project (f41b0c171180469e9092b8a8da697a78). Parent ref: group-v447541. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1131.216896] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7ae4e041-4ac4-4ac2-924f-c6b9dffdb918 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.228480] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Created folder: Project (f41b0c171180469e9092b8a8da697a78) in parent group-v447541. [ 1131.228677] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Creating folder: Instances. Parent ref: group-v447781. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1131.228935] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-85d0a74a-77ed-4ffa-9f69-753ae0a12c53 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.238541] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Created folder: Instances in parent group-v447781. [ 1131.238767] env[62627]: DEBUG oslo.service.loopingcall [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1131.239019] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1131.239245] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-72cc48f3-d4b0-4165-85fa-d44ef129a9ac {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.258468] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1131.258468] env[62627]: value = "task-2195015" [ 1131.258468] env[62627]: _type = "Task" [ 1131.258468] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.269538] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195015, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.284091] env[62627]: DEBUG nova.compute.manager [req-e570d327-3845-4bcc-af4c-624b6b1c1a8f req-bfbcbc0a-7c48-44b3-9a9e-8471d8029363 service nova] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] Received event network-changed-9db88239-0a75-4952-8e09-6a6636c9ecb0 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1131.284347] env[62627]: DEBUG nova.compute.manager [req-e570d327-3845-4bcc-af4c-624b6b1c1a8f req-bfbcbc0a-7c48-44b3-9a9e-8471d8029363 service nova] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] Refreshing instance network info cache due to event network-changed-9db88239-0a75-4952-8e09-6a6636c9ecb0. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1131.284490] env[62627]: DEBUG oslo_concurrency.lockutils [req-e570d327-3845-4bcc-af4c-624b6b1c1a8f req-bfbcbc0a-7c48-44b3-9a9e-8471d8029363 service nova] Acquiring lock "refresh_cache-3001147e-7be3-4f72-b330-8f99bf7cf3ea" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1131.284704] env[62627]: DEBUG oslo_concurrency.lockutils [req-e570d327-3845-4bcc-af4c-624b6b1c1a8f req-bfbcbc0a-7c48-44b3-9a9e-8471d8029363 service nova] Acquired lock "refresh_cache-3001147e-7be3-4f72-b330-8f99bf7cf3ea" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1131.284876] env[62627]: DEBUG nova.network.neutron [req-e570d327-3845-4bcc-af4c-624b6b1c1a8f req-bfbcbc0a-7c48-44b3-9a9e-8471d8029363 service nova] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] Refreshing network info cache for port 9db88239-0a75-4952-8e09-6a6636c9ecb0 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1131.288270] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3227421b-d2da-4f82-988b-120f5dddf6b7 tempest-ServersV294TestFqdnHostnames-171705297 tempest-ServersV294TestFqdnHostnames-171705297-project-member] Lock "ae4ec050-5062-4e57-8090-f67f8c8013ff" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 9.576s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1131.386076] env[62627]: INFO nova.compute.manager [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] [instance: b7fd9a2a-1960-4dba-93af-2dd5fe30a90d] Took 17.24 seconds to build instance. [ 1131.554020] env[62627]: DEBUG oslo_vmware.api [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195012, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.206714} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1131.554640] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1131.555405] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9418796f-6c4e-42ed-a5a6-d63e04cf4817 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.579300] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Reconfiguring VM instance instance-00000058 to attach disk [datastore1] b282093d-8013-436d-94d2-bc26161456ea/b282093d-8013-436d-94d2-bc26161456ea.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1131.579590] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6616cafd-bc53-4601-ab0c-55d5e8e58311 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.595714] env[62627]: INFO nova.compute.manager [-] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Took 1.28 seconds to deallocate network for instance. [ 1131.596752] env[62627]: DEBUG nova.compute.manager [req-8bbacc41-1ad1-4929-96ee-740e99b1f50c req-d407568b-5d62-4212-8578-6664347610e1 service nova] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Received event network-vif-plugged-9297477b-3156-43b7-b7ca-4c2d76609f24 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1131.597011] env[62627]: DEBUG oslo_concurrency.lockutils [req-8bbacc41-1ad1-4929-96ee-740e99b1f50c req-d407568b-5d62-4212-8578-6664347610e1 service nova] Acquiring lock "abb14e3e-b521-4f21-bc94-6bbb66fbee57-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1131.597267] env[62627]: DEBUG oslo_concurrency.lockutils [req-8bbacc41-1ad1-4929-96ee-740e99b1f50c req-d407568b-5d62-4212-8578-6664347610e1 service nova] Lock "abb14e3e-b521-4f21-bc94-6bbb66fbee57-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1131.597468] env[62627]: DEBUG oslo_concurrency.lockutils [req-8bbacc41-1ad1-4929-96ee-740e99b1f50c req-d407568b-5d62-4212-8578-6664347610e1 service nova] Lock "abb14e3e-b521-4f21-bc94-6bbb66fbee57-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1131.597655] env[62627]: DEBUG nova.compute.manager [req-8bbacc41-1ad1-4929-96ee-740e99b1f50c req-d407568b-5d62-4212-8578-6664347610e1 service nova] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] No waiting events found dispatching 
network-vif-plugged-9297477b-3156-43b7-b7ca-4c2d76609f24 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1131.597890] env[62627]: WARNING nova.compute.manager [req-8bbacc41-1ad1-4929-96ee-740e99b1f50c req-d407568b-5d62-4212-8578-6664347610e1 service nova] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Received unexpected event network-vif-plugged-9297477b-3156-43b7-b7ca-4c2d76609f24 for instance with vm_state building and task_state spawning. [ 1131.605284] env[62627]: DEBUG oslo_vmware.api [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Waiting for the task: (returnval){ [ 1131.605284] env[62627]: value = "task-2195016" [ 1131.605284] env[62627]: _type = "Task" [ 1131.605284] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.615934] env[62627]: DEBUG oslo_vmware.api [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195016, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.620729] env[62627]: DEBUG nova.objects.base [None req-143b7d1b-e012-407c-9251-0277960d4789 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Object Instance<84b8381c-bdd4-462e-aa98-0cc972f00882> lazy-loaded attributes: flavor,pci_requests {{(pid=62627) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1131.620941] env[62627]: DEBUG nova.network.neutron [None req-143b7d1b-e012-407c-9251-0277960d4789 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1131.762168] env[62627]: DEBUG nova.network.neutron [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Successfully updated port: 9297477b-3156-43b7-b7ca-4c2d76609f24 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1131.768331] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e197a5a2-992b-42cc-8ce8-7b4920205780 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.011s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1131.769369] env[62627]: DEBUG oslo_concurrency.lockutils [None req-240ee51e-d42f-41de-b2ff-c1d5dfad2b9b tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.906s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1131.770460] env[62627]: DEBUG nova.objects.instance [None req-240ee51e-d42f-41de-b2ff-c1d5dfad2b9b tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Lazy-loading 'resources' on Instance uuid 
c0e4ea33-2397-4716-8f6a-603fe099fdc5 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1131.777021] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195015, 'name': CreateVM_Task} progress is 25%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.797628] env[62627]: DEBUG oslo_concurrency.lockutils [None req-143b7d1b-e012-407c-9251-0277960d4789 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lock "interface-84b8381c-bdd4-462e-aa98-0cc972f00882-None" "released" by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" :: held 1.184s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1131.887398] env[62627]: DEBUG oslo_concurrency.lockutils [None req-debd4ef2-0b41-4500-94ac-12ff129c775a tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Lock "b7fd9a2a-1960-4dba-93af-2dd5fe30a90d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 18.756s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1132.105127] env[62627]: DEBUG oslo_concurrency.lockutils [None req-24305e20-8d8a-4184-b925-0809cfcd3e70 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1132.106741] env[62627]: DEBUG nova.network.neutron [req-e570d327-3845-4bcc-af4c-624b6b1c1a8f req-bfbcbc0a-7c48-44b3-9a9e-8471d8029363 service nova] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] Updated VIF entry in instance network info cache for port 9db88239-0a75-4952-8e09-6a6636c9ecb0. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1132.107076] env[62627]: DEBUG nova.network.neutron [req-e570d327-3845-4bcc-af4c-624b6b1c1a8f req-bfbcbc0a-7c48-44b3-9a9e-8471d8029363 service nova] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] Updating instance_info_cache with network_info: [{"id": "9db88239-0a75-4952-8e09-6a6636c9ecb0", "address": "fa:16:3e:70:de:4a", "network": {"id": "76eae841-5dda-4384-92ba-55247808e11f", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1004379592-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f41b0c171180469e9092b8a8da697a78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9db88239-0a", "ovs_interfaceid": "9db88239-0a75-4952-8e09-6a6636c9ecb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1132.118181] env[62627]: DEBUG oslo_vmware.api [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195016, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.275463] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquiring lock "refresh_cache-abb14e3e-b521-4f21-bc94-6bbb66fbee57" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1132.275733] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquired lock "refresh_cache-abb14e3e-b521-4f21-bc94-6bbb66fbee57" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1132.275999] env[62627]: DEBUG nova.network.neutron [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1132.302841] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195015, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.324394] env[62627]: DEBUG nova.network.neutron [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1132.476752] env[62627]: DEBUG nova.network.neutron [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Updating instance_info_cache with network_info: [{"id": "9297477b-3156-43b7-b7ca-4c2d76609f24", "address": "fa:16:3e:93:95:7a", "network": {"id": "7c6d5a35-da58-4a44-83d0-20feb994e5ab", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-709440062-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c9263b20f714274a55b88a8bdbd5251", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "99be9a5e-b3f9-4e6c-83d5-df11f817847d", "external-id": "nsx-vlan-transportzone-566", "segmentation_id": 566, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9297477b-31", "ovs_interfaceid": "9297477b-3156-43b7-b7ca-4c2d76609f24", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1132.543754] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1c0b3b4-b5fa-473f-b239-6d492ad14d6a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.551550] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b87df5e9-e020-4b1f-9d66-928b5d7c269a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.583236] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a1ca847-83b1-4a78-98c2-6a62e451bd8c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.590828] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29f9198f-b07a-4a9d-956f-f13e32db3112 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.604398] env[62627]: DEBUG nova.compute.provider_tree [None req-240ee51e-d42f-41de-b2ff-c1d5dfad2b9b tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1132.614068] 
env[62627]: DEBUG oslo_concurrency.lockutils [req-e570d327-3845-4bcc-af4c-624b6b1c1a8f req-bfbcbc0a-7c48-44b3-9a9e-8471d8029363 service nova] Releasing lock "refresh_cache-3001147e-7be3-4f72-b330-8f99bf7cf3ea" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1132.614068] env[62627]: DEBUG nova.compute.manager [req-e570d327-3845-4bcc-af4c-624b6b1c1a8f req-bfbcbc0a-7c48-44b3-9a9e-8471d8029363 service nova] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Received event network-vif-deleted-880f6f52-e5e1-4cef-ac19-65931c8f46a6 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1132.619167] env[62627]: DEBUG oslo_vmware.api [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195016, 'name': ReconfigVM_Task, 'duration_secs': 0.734843} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.619431] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Reconfigured VM instance instance-00000058 to attach disk [datastore1] b282093d-8013-436d-94d2-bc26161456ea/b282093d-8013-436d-94d2-bc26161456ea.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1132.620031] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-75b7d545-cd3e-446e-8876-dd4e21c6d612 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.627428] env[62627]: DEBUG oslo_vmware.api [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Waiting for the task: (returnval){ [ 1132.627428] env[62627]: value = "task-2195017" [ 1132.627428] env[62627]: _type = "Task" [ 1132.627428] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.635663] env[62627]: DEBUG oslo_vmware.api [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195017, 'name': Rename_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.777657] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195015, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.979356] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Releasing lock "refresh_cache-abb14e3e-b521-4f21-bc94-6bbb66fbee57" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1132.979697] env[62627]: DEBUG nova.compute.manager [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Instance network_info: |[{"id": "9297477b-3156-43b7-b7ca-4c2d76609f24", "address": "fa:16:3e:93:95:7a", "network": {"id": "7c6d5a35-da58-4a44-83d0-20feb994e5ab", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-709440062-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c9263b20f714274a55b88a8bdbd5251", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "99be9a5e-b3f9-4e6c-83d5-df11f817847d", "external-id": "nsx-vlan-transportzone-566", "segmentation_id": 566, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9297477b-31", "ovs_interfaceid": "9297477b-3156-43b7-b7ca-4c2d76609f24", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1132.980140] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:93:95:7a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '99be9a5e-b3f9-4e6c-83d5-df11f817847d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9297477b-3156-43b7-b7ca-4c2d76609f24', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1132.992041] env[62627]: DEBUG oslo.service.loopingcall [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1132.992041] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1132.993169] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7b1c04bd-da5b-442b-936d-97a97d6b24df {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.019654] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1133.019654] env[62627]: value = "task-2195018" [ 1133.019654] env[62627]: _type = "Task" [ 1133.019654] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.029668] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195018, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.107991] env[62627]: DEBUG nova.scheduler.client.report [None req-240ee51e-d42f-41de-b2ff-c1d5dfad2b9b tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1133.140529] env[62627]: DEBUG oslo_vmware.api [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195017, 'name': Rename_Task, 'duration_secs': 0.271736} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.140529] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1133.140775] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d58f2ada-3d9c-4d4b-a4f6-413d9a452207 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.150053] env[62627]: DEBUG oslo_vmware.api [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Waiting for the task: (returnval){ [ 1133.150053] env[62627]: value = "task-2195019" [ 1133.150053] env[62627]: _type = "Task" [ 1133.150053] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.158661] env[62627]: DEBUG oslo_vmware.api [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195019, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.278243] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195015, 'name': CreateVM_Task} progress is 25%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.530976] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195018, 'name': CreateVM_Task, 'duration_secs': 0.345128} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.531220] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1133.531937] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1133.532122] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1133.532536] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1133.532746] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8205ad41-65c9-4cc2-8a6a-9e70e46a7a16 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.538034] env[62627]: DEBUG oslo_vmware.api [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 1133.538034] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52ce6789-877e-a6fe-d957-6fef5891bbad" [ 1133.538034] env[62627]: _type = "Task" [ 1133.538034] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.547656] env[62627]: DEBUG oslo_vmware.api [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52ce6789-877e-a6fe-d957-6fef5891bbad, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.612809] env[62627]: DEBUG oslo_concurrency.lockutils [None req-240ee51e-d42f-41de-b2ff-c1d5dfad2b9b tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.843s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1133.615365] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.228s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1133.616931] env[62627]: INFO nova.compute.claims [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1133.639741] env[62627]: INFO nova.scheduler.client.report [None req-240ee51e-d42f-41de-b2ff-c1d5dfad2b9b tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Deleted allocations for instance c0e4ea33-2397-4716-8f6a-603fe099fdc5 [ 1133.660726] env[62627]: DEBUG oslo_vmware.api [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195019, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.665569] env[62627]: DEBUG oslo_concurrency.lockutils [None req-02b66e4a-06ca-41b3-82cb-7d2220ca7ca9 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquiring lock "interface-84b8381c-bdd4-462e-aa98-0cc972f00882-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1133.665767] env[62627]: DEBUG oslo_concurrency.lockutils [None req-02b66e4a-06ca-41b3-82cb-7d2220ca7ca9 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lock "interface-84b8381c-bdd4-462e-aa98-0cc972f00882-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1133.666128] env[62627]: DEBUG nova.objects.instance [None req-02b66e4a-06ca-41b3-82cb-7d2220ca7ca9 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lazy-loading 'flavor' on Instance uuid 84b8381c-bdd4-462e-aa98-0cc972f00882 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1133.674406] env[62627]: DEBUG nova.compute.manager [req-5e099fa9-d65b-4a99-ab88-f09ec2bb9f44 req-04056cf5-5b74-465b-a548-4d94e6c9d007 service nova] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Received event network-changed-9297477b-3156-43b7-b7ca-4c2d76609f24 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1133.674406] env[62627]: DEBUG nova.compute.manager [req-5e099fa9-d65b-4a99-ab88-f09ec2bb9f44 req-04056cf5-5b74-465b-a548-4d94e6c9d007 service nova] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Refreshing instance network info cache due to event network-changed-9297477b-3156-43b7-b7ca-4c2d76609f24. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1133.675034] env[62627]: DEBUG oslo_concurrency.lockutils [req-5e099fa9-d65b-4a99-ab88-f09ec2bb9f44 req-04056cf5-5b74-465b-a548-4d94e6c9d007 service nova] Acquiring lock "refresh_cache-abb14e3e-b521-4f21-bc94-6bbb66fbee57" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1133.675034] env[62627]: DEBUG oslo_concurrency.lockutils [req-5e099fa9-d65b-4a99-ab88-f09ec2bb9f44 req-04056cf5-5b74-465b-a548-4d94e6c9d007 service nova] Acquired lock "refresh_cache-abb14e3e-b521-4f21-bc94-6bbb66fbee57" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1133.675168] env[62627]: DEBUG nova.network.neutron [req-5e099fa9-d65b-4a99-ab88-f09ec2bb9f44 req-04056cf5-5b74-465b-a548-4d94e6c9d007 service nova] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Refreshing network info cache for port 9297477b-3156-43b7-b7ca-4c2d76609f24 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1133.778030] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195015, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.049548] env[62627]: DEBUG oslo_vmware.api [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52ce6789-877e-a6fe-d957-6fef5891bbad, 'name': SearchDatastore_Task, 'duration_secs': 0.016364} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.049917] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1134.050222] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1134.051009] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1134.051009] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1134.051009] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1134.051247] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-afbf5f38-c520-46c7-afba-40160951b3a7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.053725] env[62627]: DEBUG oslo_concurrency.lockutils [None req-761becfe-42d0-427d-bb4a-371c297cadf1 tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Acquiring lock "b7fd9a2a-1960-4dba-93af-2dd5fe30a90d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1134.053887] env[62627]: DEBUG oslo_concurrency.lockutils [None req-761becfe-42d0-427d-bb4a-371c297cadf1 tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Lock "b7fd9a2a-1960-4dba-93af-2dd5fe30a90d" 
acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1134.054140] env[62627]: DEBUG oslo_concurrency.lockutils [None req-761becfe-42d0-427d-bb4a-371c297cadf1 tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Acquiring lock "b7fd9a2a-1960-4dba-93af-2dd5fe30a90d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1134.054322] env[62627]: DEBUG oslo_concurrency.lockutils [None req-761becfe-42d0-427d-bb4a-371c297cadf1 tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Lock "b7fd9a2a-1960-4dba-93af-2dd5fe30a90d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1134.054496] env[62627]: DEBUG oslo_concurrency.lockutils [None req-761becfe-42d0-427d-bb4a-371c297cadf1 tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Lock "b7fd9a2a-1960-4dba-93af-2dd5fe30a90d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1134.056542] env[62627]: INFO nova.compute.manager [None req-761becfe-42d0-427d-bb4a-371c297cadf1 tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] [instance: b7fd9a2a-1960-4dba-93af-2dd5fe30a90d] Terminating instance [ 1134.068794] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1134.068996] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1134.069752] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b70babdd-c075-4517-b519-ab4563660d61 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.075904] env[62627]: DEBUG oslo_vmware.api [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 1134.075904] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]521a3d9a-1363-624d-ad81-c7272e9f6fb8" [ 1134.075904] env[62627]: _type = "Task" [ 1134.075904] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.083838] env[62627]: DEBUG oslo_vmware.api [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]521a3d9a-1363-624d-ad81-c7272e9f6fb8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.147797] env[62627]: DEBUG oslo_concurrency.lockutils [None req-240ee51e-d42f-41de-b2ff-c1d5dfad2b9b tempest-InstanceActionsV221TestJSON-1439412738 tempest-InstanceActionsV221TestJSON-1439412738-project-member] Lock "c0e4ea33-2397-4716-8f6a-603fe099fdc5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.864s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1134.160393] env[62627]: DEBUG oslo_vmware.api [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195019, 'name': PowerOnVM_Task, 'duration_secs': 0.741254} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.160497] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1134.160852] env[62627]: INFO nova.compute.manager [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Took 9.11 seconds to spawn the instance on the hypervisor. [ 1134.160966] env[62627]: DEBUG nova.compute.manager [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1134.161669] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82c6aee4-dfe3-446e-ac3f-a813715c0a80 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.297781] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195015, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.392167] env[62627]: DEBUG nova.objects.instance [None req-02b66e4a-06ca-41b3-82cb-7d2220ca7ca9 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lazy-loading 'pci_requests' on Instance uuid 84b8381c-bdd4-462e-aa98-0cc972f00882 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1134.564722] env[62627]: DEBUG nova.compute.manager [None req-761becfe-42d0-427d-bb4a-371c297cadf1 tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] [instance: b7fd9a2a-1960-4dba-93af-2dd5fe30a90d] Start destroying the instance on the hypervisor. {{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1134.565011] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-761becfe-42d0-427d-bb4a-371c297cadf1 tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] [instance: b7fd9a2a-1960-4dba-93af-2dd5fe30a90d] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1134.565927] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2250a20a-127d-407d-8b5e-161bb97f812b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.575755] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-761becfe-42d0-427d-bb4a-371c297cadf1 tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] [instance: b7fd9a2a-1960-4dba-93af-2dd5fe30a90d] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1134.576282] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b1317838-4778-4d59-801e-cd8db22bfd6a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.586332] env[62627]: DEBUG oslo_vmware.api [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]521a3d9a-1363-624d-ad81-c7272e9f6fb8, 'name': SearchDatastore_Task, 'duration_secs': 0.034301} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.588166] env[62627]: DEBUG oslo_vmware.api [None req-761becfe-42d0-427d-bb4a-371c297cadf1 tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Waiting for the task: (returnval){ [ 1134.588166] env[62627]: value = "task-2195020" [ 1134.588166] env[62627]: _type = "Task" [ 1134.588166] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.588370] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64570a47-ed34-4fe2-9e21-d9f6eff26744 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.595921] env[62627]: DEBUG oslo_vmware.api [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 1134.595921] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5264b8ec-480b-e335-064e-ebc5cda92022" [ 1134.595921] env[62627]: _type = "Task" [ 1134.595921] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.599245] env[62627]: DEBUG oslo_vmware.api [None req-761becfe-42d0-427d-bb4a-371c297cadf1 tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Task: {'id': task-2195020, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.606626] env[62627]: DEBUG oslo_vmware.api [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5264b8ec-480b-e335-064e-ebc5cda92022, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.609215] env[62627]: DEBUG nova.network.neutron [req-5e099fa9-d65b-4a99-ab88-f09ec2bb9f44 req-04056cf5-5b74-465b-a548-4d94e6c9d007 service nova] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Updated VIF entry in instance network info cache for port 9297477b-3156-43b7-b7ca-4c2d76609f24. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1134.609570] env[62627]: DEBUG nova.network.neutron [req-5e099fa9-d65b-4a99-ab88-f09ec2bb9f44 req-04056cf5-5b74-465b-a548-4d94e6c9d007 service nova] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Updating instance_info_cache with network_info: [{"id": "9297477b-3156-43b7-b7ca-4c2d76609f24", "address": "fa:16:3e:93:95:7a", "network": {"id": "7c6d5a35-da58-4a44-83d0-20feb994e5ab", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-709440062-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c9263b20f714274a55b88a8bdbd5251", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "99be9a5e-b3f9-4e6c-83d5-df11f817847d", "external-id": "nsx-vlan-transportzone-566", "segmentation_id": 566, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9297477b-31", "ovs_interfaceid": "9297477b-3156-43b7-b7ca-4c2d76609f24", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1134.688345] env[62627]: INFO nova.compute.manager [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Took 20.44 seconds to build instance. [ 1134.781232] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195015, 'name': CreateVM_Task, 'duration_secs': 3.456664} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.784522] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1134.787443] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1134.787782] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1134.788924] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1134.788924] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-741090d6-4da4-438e-b5a7-8838f4f25fd0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.795122] env[62627]: DEBUG oslo_vmware.api [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Waiting for the task: (returnval){ [ 1134.795122] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52f20b00-085d-feed-cf9e-b4790a36adcf" [ 1134.795122] env[62627]: _type = "Task" [ 1134.795122] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.808640] env[62627]: DEBUG oslo_vmware.api [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52f20b00-085d-feed-cf9e-b4790a36adcf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.883430] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9a7b3c9-f610-45fc-bc0a-1514c6bee02c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.891148] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf8febc0-83ad-4f98-b15b-01eb3165c1cd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.894942] env[62627]: DEBUG nova.objects.base [None req-02b66e4a-06ca-41b3-82cb-7d2220ca7ca9 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Object Instance<84b8381c-bdd4-462e-aa98-0cc972f00882> lazy-loaded attributes: flavor,pci_requests {{(pid=62627) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1134.895172] env[62627]: DEBUG nova.network.neutron [None req-02b66e4a-06ca-41b3-82cb-7d2220ca7ca9 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1134.926145] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2739b0b-2523-408b-81ce-b019caf042bb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.937925] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9a75a9b-f117-43cf-bde4-607a1c05ce9e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.952927] env[62627]: DEBUG nova.compute.provider_tree [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1134.967443] env[62627]: DEBUG nova.policy [None req-02b66e4a-06ca-41b3-82cb-7d2220ca7ca9 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3c5e5eee141e496db2d2ea316c43e6be', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8e4f7ad3ebcf4c4d97bdbce58eec5a09', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 1135.104160] env[62627]: DEBUG oslo_vmware.api [None req-761becfe-42d0-427d-bb4a-371c297cadf1 tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Task: {'id': task-2195020, 'name': PowerOffVM_Task, 'duration_secs': 0.25605} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.109130] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-761becfe-42d0-427d-bb4a-371c297cadf1 tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] [instance: b7fd9a2a-1960-4dba-93af-2dd5fe30a90d] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1135.109428] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-761becfe-42d0-427d-bb4a-371c297cadf1 tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] [instance: b7fd9a2a-1960-4dba-93af-2dd5fe30a90d] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1135.110539] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4d07f18b-fd20-4151-bc3b-71dc84ae9380 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.112987] env[62627]: DEBUG oslo_concurrency.lockutils [req-5e099fa9-d65b-4a99-ab88-f09ec2bb9f44 req-04056cf5-5b74-465b-a548-4d94e6c9d007 service nova] Releasing lock "refresh_cache-abb14e3e-b521-4f21-bc94-6bbb66fbee57" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1135.121392] env[62627]: DEBUG oslo_vmware.api [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5264b8ec-480b-e335-064e-ebc5cda92022, 'name': SearchDatastore_Task, 'duration_secs': 0.016613} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.121392] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1135.121392] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] abb14e3e-b521-4f21-bc94-6bbb66fbee57/abb14e3e-b521-4f21-bc94-6bbb66fbee57.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1135.121392] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a7e22e77-7bd9-4ae8-b5b0-a30ca25c3d98 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.129984] env[62627]: DEBUG oslo_vmware.api [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 1135.129984] env[62627]: value = "task-2195022" [ 1135.129984] env[62627]: _type = "Task" [ 1135.129984] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.142357] env[62627]: DEBUG oslo_vmware.api [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2195022, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.178646] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-761becfe-42d0-427d-bb4a-371c297cadf1 tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] [instance: b7fd9a2a-1960-4dba-93af-2dd5fe30a90d] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1135.178867] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-761becfe-42d0-427d-bb4a-371c297cadf1 tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] [instance: b7fd9a2a-1960-4dba-93af-2dd5fe30a90d] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1135.179115] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-761becfe-42d0-427d-bb4a-371c297cadf1 tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Deleting the datastore file [datastore2] b7fd9a2a-1960-4dba-93af-2dd5fe30a90d {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1135.179374] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-54495858-ccc0-4e2d-9008-085d6384e2e4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.186022] env[62627]: DEBUG oslo_vmware.api [None req-761becfe-42d0-427d-bb4a-371c297cadf1 tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Waiting for the task: (returnval){ [ 1135.186022] env[62627]: value = "task-2195023" [ 1135.186022] env[62627]: _type = "Task" [ 1135.186022] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.194929] env[62627]: DEBUG oslo_vmware.api [None req-761becfe-42d0-427d-bb4a-371c297cadf1 tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Task: {'id': task-2195023, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.196459] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2e231986-d7d5-468a-a830-7f5074a5a628 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Lock "b282093d-8013-436d-94d2-bc26161456ea" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.952s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1135.306343] env[62627]: DEBUG oslo_vmware.api [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52f20b00-085d-feed-cf9e-b4790a36adcf, 'name': SearchDatastore_Task, 'duration_secs': 0.022455} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.306597] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1135.306881] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1135.307169] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1135.307417] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1135.307637] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1135.308066] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5a4cba71-1064-4f2a-b2e4-7f17da826205 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.316242] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1135.316432] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1135.318342] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8225dd3d-691f-4099-a1f1-654dc37d8da4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.323482] env[62627]: DEBUG oslo_vmware.api [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Waiting for the task: (returnval){ [ 1135.323482] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52282c10-fcfc-6276-3df5-816e1d7f9af5" [ 1135.323482] env[62627]: _type = "Task" [ 1135.323482] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.334021] env[62627]: DEBUG oslo_vmware.api [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52282c10-fcfc-6276-3df5-816e1d7f9af5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.389181] env[62627]: DEBUG nova.network.neutron [None req-02b66e4a-06ca-41b3-82cb-7d2220ca7ca9 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Successfully created port: b8c79070-6650-4427-ab59-c910dccfba54 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1135.456390] env[62627]: DEBUG nova.scheduler.client.report [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1135.646071] env[62627]: DEBUG oslo_vmware.api [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2195022, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.702987] env[62627]: DEBUG oslo_vmware.api [None req-761becfe-42d0-427d-bb4a-371c297cadf1 tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Task: {'id': task-2195023, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.392599} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.703282] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-761becfe-42d0-427d-bb4a-371c297cadf1 tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1135.703535] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-761becfe-42d0-427d-bb4a-371c297cadf1 tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] [instance: b7fd9a2a-1960-4dba-93af-2dd5fe30a90d] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1135.703641] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-761becfe-42d0-427d-bb4a-371c297cadf1 tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] [instance: b7fd9a2a-1960-4dba-93af-2dd5fe30a90d] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1135.703751] env[62627]: INFO nova.compute.manager [None req-761becfe-42d0-427d-bb4a-371c297cadf1 tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] [instance: b7fd9a2a-1960-4dba-93af-2dd5fe30a90d] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1135.704877] env[62627]: DEBUG oslo.service.loopingcall [None req-761becfe-42d0-427d-bb4a-371c297cadf1 tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1135.704877] env[62627]: DEBUG nova.compute.manager [-] [instance: b7fd9a2a-1960-4dba-93af-2dd5fe30a90d] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1135.704877] env[62627]: DEBUG nova.network.neutron [-] [instance: b7fd9a2a-1960-4dba-93af-2dd5fe30a90d] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1135.716629] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Acquiring lock "28e42d69-0c19-4e3c-ae49-9076fdc65aa8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1135.716896] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Lock "28e42d69-0c19-4e3c-ae49-9076fdc65aa8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1135.844319] env[62627]: DEBUG oslo_vmware.api [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52282c10-fcfc-6276-3df5-816e1d7f9af5, 'name': 
SearchDatastore_Task, 'duration_secs': 0.01009} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.845200] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-feb44a15-6b63-4cff-8f29-02cae7dd8c45 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.851403] env[62627]: DEBUG oslo_vmware.api [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Waiting for the task: (returnval){ [ 1135.851403] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52ccf01d-1d6a-1248-38f6-a28c5f77af55" [ 1135.851403] env[62627]: _type = "Task" [ 1135.851403] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.867482] env[62627]: DEBUG oslo_vmware.api [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52ccf01d-1d6a-1248-38f6-a28c5f77af55, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.962846] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.347s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1135.963987] env[62627]: DEBUG nova.compute.manager [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Start building networks asynchronously for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1135.968991] env[62627]: DEBUG oslo_concurrency.lockutils [None req-24305e20-8d8a-4184-b925-0809cfcd3e70 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.864s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1135.969176] env[62627]: DEBUG nova.objects.instance [None req-24305e20-8d8a-4184-b925-0809cfcd3e70 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lazy-loading 'resources' on Instance uuid 9e0693d9-077a-4a02-b1c5-40fdddcdd27d {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1136.071532] env[62627]: DEBUG nova.compute.manager [req-afefbb34-80a2-4102-ba2d-20ff529e003a req-87781529-c3c8-4433-b50d-96b80ba402d2 service nova] [instance: b7fd9a2a-1960-4dba-93af-2dd5fe30a90d] Received event network-vif-deleted-772cf7cb-3d85-4d24-a40d-d294b5f5c582 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1136.071736] env[62627]: INFO nova.compute.manager [req-afefbb34-80a2-4102-ba2d-20ff529e003a req-87781529-c3c8-4433-b50d-96b80ba402d2 service nova] [instance: b7fd9a2a-1960-4dba-93af-2dd5fe30a90d] Neutron deleted interface 772cf7cb-3d85-4d24-a40d-d294b5f5c582; detaching it from the instance and deleting it from the info cache [ 1136.071971] env[62627]: DEBUG nova.network.neutron [req-afefbb34-80a2-4102-ba2d-20ff529e003a req-87781529-c3c8-4433-b50d-96b80ba402d2 service nova] [instance: b7fd9a2a-1960-4dba-93af-2dd5fe30a90d] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1136.144421] env[62627]: DEBUG oslo_vmware.api [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2195022, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.610741} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.144791] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] abb14e3e-b521-4f21-bc94-6bbb66fbee57/abb14e3e-b521-4f21-bc94-6bbb66fbee57.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1136.144978] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1136.145422] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-04e4560a-41f7-419c-92c9-e90256bcd4c5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.153300] env[62627]: DEBUG oslo_vmware.api [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 1136.153300] env[62627]: value = "task-2195024" [ 1136.153300] env[62627]: _type = "Task" [ 1136.153300] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.161863] env[62627]: DEBUG oslo_vmware.api [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2195024, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.220956] env[62627]: DEBUG nova.compute.manager [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: 28e42d69-0c19-4e3c-ae49-9076fdc65aa8] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1136.364037] env[62627]: DEBUG oslo_vmware.api [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52ccf01d-1d6a-1248-38f6-a28c5f77af55, 'name': SearchDatastore_Task, 'duration_secs': 0.012522} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.364037] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1136.364037] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore1] 3001147e-7be3-4f72-b330-8f99bf7cf3ea/3001147e-7be3-4f72-b330-8f99bf7cf3ea.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1136.364037] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-278d35b4-f12d-40a6-a1c7-8c3cd4a5d84b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.370897] env[62627]: DEBUG oslo_vmware.api [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Waiting for the task: (returnval){ [ 1136.370897] env[62627]: value = "task-2195025" [ 1136.370897] env[62627]: _type = "Task" [ 1136.370897] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.383859] env[62627]: DEBUG oslo_vmware.api [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195025, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.470687] env[62627]: DEBUG nova.compute.utils [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1136.472614] env[62627]: DEBUG nova.compute.manager [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Allocating IP information in the background. 
{{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1136.472860] env[62627]: DEBUG nova.network.neutron [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1136.545491] env[62627]: DEBUG nova.network.neutron [-] [instance: b7fd9a2a-1960-4dba-93af-2dd5fe30a90d] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1136.577568] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c5307895-c9b0-4855-9376-9f742345fd3f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.588138] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f13af1e-6849-4fa5-bb26-db2a240bf058 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.611425] env[62627]: DEBUG nova.policy [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4289413aec40498dab5f6170d69eb39a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '74aed70194cf444884768a5c89fe8613', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 1136.640770] env[62627]: DEBUG nova.compute.manager [req-afefbb34-80a2-4102-ba2d-20ff529e003a req-87781529-c3c8-4433-b50d-96b80ba402d2 service nova] [instance: b7fd9a2a-1960-4dba-93af-2dd5fe30a90d] Detach interface failed, port_id=772cf7cb-3d85-4d24-a40d-d294b5f5c582, reason: Instance b7fd9a2a-1960-4dba-93af-2dd5fe30a90d could not be found. {{(pid=62627) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1136.667013] env[62627]: DEBUG oslo_vmware.api [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2195024, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.102981} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.667742] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1136.672088] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8db8ced-7fc9-40ed-8e9f-e7f312576389 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.696731] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Reconfiguring VM instance instance-0000005a to attach disk [datastore2] abb14e3e-b521-4f21-bc94-6bbb66fbee57/abb14e3e-b521-4f21-bc94-6bbb66fbee57.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1136.699878] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6dc9d75b-2203-4284-b7b2-495a2bfc418a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.727467] env[62627]: DEBUG oslo_vmware.api [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 1136.727467] env[62627]: value = "task-2195026" [ 1136.727467] env[62627]: _type = "Task" [ 1136.727467] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.743224] env[62627]: DEBUG oslo_vmware.api [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2195026, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.756479] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1136.848626] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c25b163-333d-4184-b9a2-0afedfb3c1c0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.856903] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f72dc51a-75e2-43d6-b46a-7e13bb0ffb13 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.900190] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1b075cb-e736-4d15-a028-3283da07e2a0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.908269] env[62627]: DEBUG oslo_vmware.api [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195025, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.912665] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57d00f1c-c87b-4893-bcf3-676ea0739cb4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.928702] env[62627]: DEBUG nova.compute.provider_tree [None req-24305e20-8d8a-4184-b925-0809cfcd3e70 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1136.978798] env[62627]: DEBUG nova.compute.manager [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1137.048390] env[62627]: INFO nova.compute.manager [-] [instance: b7fd9a2a-1960-4dba-93af-2dd5fe30a90d] Took 1.34 seconds to deallocate network for instance. [ 1137.244288] env[62627]: DEBUG oslo_vmware.api [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2195026, 'name': ReconfigVM_Task, 'duration_secs': 0.368908} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.244698] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Reconfigured VM instance instance-0000005a to attach disk [datastore2] abb14e3e-b521-4f21-bc94-6bbb66fbee57/abb14e3e-b521-4f21-bc94-6bbb66fbee57.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1137.246440] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2c3ddcc9-269a-496e-b2a9-80d825ca955d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.253881] env[62627]: DEBUG oslo_vmware.api [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 1137.253881] env[62627]: value = "task-2195027" [ 1137.253881] env[62627]: _type = "Task" [ 1137.253881] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.266421] env[62627]: DEBUG oslo_vmware.api [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2195027, 'name': Rename_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.337333] env[62627]: DEBUG nova.network.neutron [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Successfully created port: 9830a1ce-942e-4954-8513-f624e13d304c {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1137.380584] env[62627]: DEBUG nova.network.neutron [None req-02b66e4a-06ca-41b3-82cb-7d2220ca7ca9 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Successfully updated port: b8c79070-6650-4427-ab59-c910dccfba54 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1137.407790] env[62627]: DEBUG oslo_vmware.api [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195025, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.544546} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.408078] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore1] 3001147e-7be3-4f72-b330-8f99bf7cf3ea/3001147e-7be3-4f72-b330-8f99bf7cf3ea.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1137.408311] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1137.408559] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-22a966c3-c3e0-4585-8d50-7f70656f495d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.417158] env[62627]: DEBUG oslo_vmware.api [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Waiting for the task: (returnval){ [ 1137.417158] env[62627]: value = "task-2195028" [ 1137.417158] env[62627]: _type = "Task" [ 1137.417158] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.429743] env[62627]: DEBUG oslo_vmware.api [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195028, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.435044] env[62627]: DEBUG nova.scheduler.client.report [None req-24305e20-8d8a-4184-b925-0809cfcd3e70 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1137.560461] env[62627]: DEBUG oslo_concurrency.lockutils [None req-761becfe-42d0-427d-bb4a-371c297cadf1 tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1137.767955] env[62627]: DEBUG oslo_vmware.api [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2195027, 'name': Rename_Task, 'duration_secs': 0.165987} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.768644] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1137.769376] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1cde3d30-3e04-420d-84f7-549e9c81c483 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.776963] env[62627]: DEBUG oslo_vmware.api [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 1137.776963] env[62627]: value = "task-2195029" [ 1137.776963] env[62627]: _type = "Task" [ 1137.776963] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.789425] env[62627]: DEBUG oslo_vmware.api [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2195029, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.796131] env[62627]: DEBUG oslo_vmware.rw_handles [None req-12ca1cb5-b141-4427-8ba3-db21cf725159 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e93d8c-7fc6-1e30-7c35-6eae360ed387/disk-0.vmdk. 
{{(pid=62627) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1137.797525] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1de4c8d2-db89-40ce-ba70-2bf5bb3fe3e7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.803665] env[62627]: DEBUG oslo_vmware.rw_handles [None req-12ca1cb5-b141-4427-8ba3-db21cf725159 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e93d8c-7fc6-1e30-7c35-6eae360ed387/disk-0.vmdk is in state: ready. {{(pid=62627) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1137.803843] env[62627]: ERROR oslo_vmware.rw_handles [None req-12ca1cb5-b141-4427-8ba3-db21cf725159 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e93d8c-7fc6-1e30-7c35-6eae360ed387/disk-0.vmdk due to incomplete transfer. [ 1137.804096] env[62627]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-46834086-b9de-434d-a114-2c1b1d2da865 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.813128] env[62627]: DEBUG oslo_vmware.rw_handles [None req-12ca1cb5-b141-4427-8ba3-db21cf725159 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e93d8c-7fc6-1e30-7c35-6eae360ed387/disk-0.vmdk. {{(pid=62627) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1137.813193] env[62627]: DEBUG nova.virt.vmwareapi.images [None req-12ca1cb5-b141-4427-8ba3-db21cf725159 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Uploaded image 092e7cd9-297b-4a2d-92b6-29f1a70c0b73 to the Glance image server {{(pid=62627) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1137.817117] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-12ca1cb5-b141-4427-8ba3-db21cf725159 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Destroying the VM {{(pid=62627) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1137.817217] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-8d49cd1d-4a44-4337-9c92-cbc9509d5617 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.824240] env[62627]: DEBUG oslo_vmware.api [None req-12ca1cb5-b141-4427-8ba3-db21cf725159 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1137.824240] env[62627]: value = "task-2195030" [ 1137.824240] env[62627]: _type = "Task" [ 1137.824240] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.835826] env[62627]: DEBUG oslo_vmware.api [None req-12ca1cb5-b141-4427-8ba3-db21cf725159 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195030, 'name': Destroy_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.883408] env[62627]: DEBUG oslo_concurrency.lockutils [None req-02b66e4a-06ca-41b3-82cb-7d2220ca7ca9 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquiring lock "refresh_cache-84b8381c-bdd4-462e-aa98-0cc972f00882" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1137.883408] env[62627]: DEBUG oslo_concurrency.lockutils [None req-02b66e4a-06ca-41b3-82cb-7d2220ca7ca9 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquired lock "refresh_cache-84b8381c-bdd4-462e-aa98-0cc972f00882" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1137.883408] env[62627]: DEBUG nova.network.neutron [None req-02b66e4a-06ca-41b3-82cb-7d2220ca7ca9 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1137.931251] env[62627]: DEBUG oslo_vmware.api [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195028, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.941292] env[62627]: DEBUG oslo_concurrency.lockutils [None req-24305e20-8d8a-4184-b925-0809cfcd3e70 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.969s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1137.941292] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.184s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1137.943064] env[62627]: INFO nova.compute.claims [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: 28e42d69-0c19-4e3c-ae49-9076fdc65aa8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1137.972991] env[62627]: INFO nova.scheduler.client.report [None req-24305e20-8d8a-4184-b925-0809cfcd3e70 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Deleted allocations for instance 9e0693d9-077a-4a02-b1c5-40fdddcdd27d [ 1137.989777] env[62627]: DEBUG nova.compute.manager [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Start spawning the instance on the hypervisor. 
{{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1138.020890] env[62627]: DEBUG nova.virt.hardware [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1138.021378] env[62627]: DEBUG nova.virt.hardware [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1138.024035] env[62627]: DEBUG nova.virt.hardware [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1138.024035] env[62627]: DEBUG nova.virt.hardware [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1138.024035] env[62627]: DEBUG nova.virt.hardware [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1138.024035] env[62627]: DEBUG nova.virt.hardware [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1138.024035] env[62627]: DEBUG nova.virt.hardware [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1138.024035] env[62627]: DEBUG nova.virt.hardware [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1138.024035] 
env[62627]: DEBUG nova.virt.hardware [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1138.024035] env[62627]: DEBUG nova.virt.hardware [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1138.024035] env[62627]: DEBUG nova.virt.hardware [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1138.024035] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d762ba05-ac85-4b61-a7fc-992ae5e1be82 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.033735] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81316c80-8ccf-4f57-b318-473dedf21aa2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.109826] env[62627]: DEBUG nova.compute.manager [req-f3230f2a-da87-43f0-ae0f-40dc953d346c req-b80baaa9-ab4a-45f5-a7d6-0c9c8b92cb49 service nova] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Received event network-vif-plugged-b8c79070-6650-4427-ab59-c910dccfba54 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1138.110102] env[62627]: DEBUG oslo_concurrency.lockutils [req-f3230f2a-da87-43f0-ae0f-40dc953d346c req-b80baaa9-ab4a-45f5-a7d6-0c9c8b92cb49 service nova] Acquiring lock "84b8381c-bdd4-462e-aa98-0cc972f00882-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1138.110341] env[62627]: DEBUG oslo_concurrency.lockutils [req-f3230f2a-da87-43f0-ae0f-40dc953d346c req-b80baaa9-ab4a-45f5-a7d6-0c9c8b92cb49 service nova] Lock "84b8381c-bdd4-462e-aa98-0cc972f00882-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1138.110533] env[62627]: DEBUG oslo_concurrency.lockutils [req-f3230f2a-da87-43f0-ae0f-40dc953d346c req-b80baaa9-ab4a-45f5-a7d6-0c9c8b92cb49 service nova] Lock "84b8381c-bdd4-462e-aa98-0cc972f00882-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1138.110678] env[62627]: DEBUG nova.compute.manager [req-f3230f2a-da87-43f0-ae0f-40dc953d346c req-b80baaa9-ab4a-45f5-a7d6-0c9c8b92cb49 service nova] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] No waiting events found dispatching network-vif-plugged-b8c79070-6650-4427-ab59-c910dccfba54 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1138.110870] env[62627]: WARNING 
nova.compute.manager [req-f3230f2a-da87-43f0-ae0f-40dc953d346c req-b80baaa9-ab4a-45f5-a7d6-0c9c8b92cb49 service nova] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Received unexpected event network-vif-plugged-b8c79070-6650-4427-ab59-c910dccfba54 for instance with vm_state active and task_state None. [ 1138.110986] env[62627]: DEBUG nova.compute.manager [req-f3230f2a-da87-43f0-ae0f-40dc953d346c req-b80baaa9-ab4a-45f5-a7d6-0c9c8b92cb49 service nova] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Received event network-changed-b8c79070-6650-4427-ab59-c910dccfba54 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1138.112394] env[62627]: DEBUG nova.compute.manager [req-f3230f2a-da87-43f0-ae0f-40dc953d346c req-b80baaa9-ab4a-45f5-a7d6-0c9c8b92cb49 service nova] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Refreshing instance network info cache due to event network-changed-b8c79070-6650-4427-ab59-c910dccfba54. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1138.112582] env[62627]: DEBUG oslo_concurrency.lockutils [req-f3230f2a-da87-43f0-ae0f-40dc953d346c req-b80baaa9-ab4a-45f5-a7d6-0c9c8b92cb49 service nova] Acquiring lock "refresh_cache-84b8381c-bdd4-462e-aa98-0cc972f00882" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1138.288583] env[62627]: DEBUG oslo_vmware.api [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2195029, 'name': PowerOnVM_Task, 'duration_secs': 0.494209} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.288861] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1138.289300] env[62627]: INFO nova.compute.manager [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Took 7.39 seconds to spawn the instance on the hypervisor. [ 1138.289709] env[62627]: DEBUG nova.compute.manager [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1138.290870] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49723c75-fd7b-4f36-8265-e1552e49178d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.337686] env[62627]: DEBUG oslo_vmware.api [None req-12ca1cb5-b141-4427-8ba3-db21cf725159 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195030, 'name': Destroy_Task, 'duration_secs': 0.404716} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.338034] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-12ca1cb5-b141-4427-8ba3-db21cf725159 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Destroyed the VM [ 1138.338292] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-12ca1cb5-b141-4427-8ba3-db21cf725159 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Deleting Snapshot of the VM instance {{(pid=62627) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1138.338547] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-c2fff05c-f879-484e-a49f-0a393ab621ac {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.345759] env[62627]: DEBUG oslo_vmware.api [None req-12ca1cb5-b141-4427-8ba3-db21cf725159 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1138.345759] env[62627]: value = "task-2195031" [ 1138.345759] env[62627]: _type = "Task" [ 1138.345759] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.356827] env[62627]: DEBUG oslo_vmware.api [None req-12ca1cb5-b141-4427-8ba3-db21cf725159 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195031, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.426265] env[62627]: WARNING nova.network.neutron [None req-02b66e4a-06ca-41b3-82cb-7d2220ca7ca9 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] a1c5c227-8f1f-4881-a6d8-f446b64d9846 already exists in list: networks containing: ['a1c5c227-8f1f-4881-a6d8-f446b64d9846']. ignoring it [ 1138.431796] env[62627]: DEBUG oslo_vmware.api [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195028, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.580381} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.432118] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1138.432940] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48f3f0a7-6e0c-4a47-b990-823806dad6d6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.460109] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] Reconfiguring VM instance instance-00000059 to attach disk [datastore1] 3001147e-7be3-4f72-b330-8f99bf7cf3ea/3001147e-7be3-4f72-b330-8f99bf7cf3ea.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1138.463875] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-99017dfd-ed4a-475d-8198-a5a554d01760 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.492531] env[62627]: DEBUG oslo_vmware.api [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Waiting for the task: (returnval){ [ 1138.492531] env[62627]: value = "task-2195032" [ 1138.492531] env[62627]: _type = "Task" [ 1138.492531] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.492748] env[62627]: DEBUG oslo_concurrency.lockutils [None req-24305e20-8d8a-4184-b925-0809cfcd3e70 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "9e0693d9-077a-4a02-b1c5-40fdddcdd27d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.833s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1138.503677] env[62627]: DEBUG oslo_vmware.api [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195032, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.681904] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cc2f12e-7c59-48fb-9f76-5598ac203463 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.691227] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-217e9612-47c7-410a-acbf-8fb57babd058 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.720788] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30f6771f-8241-4667-8daa-fea919943eff {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.732426] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b965a3a-935c-419a-9c29-a8cc0ad2e636 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.747719] env[62627]: DEBUG nova.compute.provider_tree [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1138.812307] env[62627]: INFO nova.compute.manager [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Took 17.78 seconds to build instance. [ 1138.861255] env[62627]: DEBUG oslo_vmware.api [None req-12ca1cb5-b141-4427-8ba3-db21cf725159 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195031, 'name': RemoveSnapshot_Task} progress is 36%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.928263] env[62627]: DEBUG nova.network.neutron [None req-02b66e4a-06ca-41b3-82cb-7d2220ca7ca9 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Updating instance_info_cache with network_info: [{"id": "4c3f3405-39a4-4c22-9c26-bdd0ab342c29", "address": "fa:16:3e:05:76:ef", "network": {"id": "a1c5c227-8f1f-4881-a6d8-f446b64d9846", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1052922661-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.243", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e4f7ad3ebcf4c4d97bdbce58eec5a09", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "023d6500-887e-4dc4-bec5-06b40450d9c0", "external-id": "nsx-vlan-transportzone-108", "segmentation_id": 108, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c3f3405-39", "ovs_interfaceid": "4c3f3405-39a4-4c22-9c26-bdd0ab342c29", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "b8c79070-6650-4427-ab59-c910dccfba54", "address": "fa:16:3e:08:8f:ca", "network": {"id": "a1c5c227-8f1f-4881-a6d8-f446b64d9846", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1052922661-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e4f7ad3ebcf4c4d97bdbce58eec5a09", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "023d6500-887e-4dc4-bec5-06b40450d9c0", "external-id": "nsx-vlan-transportzone-108", "segmentation_id": 108, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8c79070-66", "ovs_interfaceid": "b8c79070-6650-4427-ab59-c910dccfba54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1139.008618] env[62627]: DEBUG oslo_vmware.api [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195032, 'name': ReconfigVM_Task, 'duration_secs': 0.331962} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.008920] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] Reconfigured VM instance instance-00000059 to attach disk [datastore1] 3001147e-7be3-4f72-b330-8f99bf7cf3ea/3001147e-7be3-4f72-b330-8f99bf7cf3ea.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1139.009633] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a44bdb19-0d5a-498b-85d4-8761f0579fa8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.021280] env[62627]: DEBUG oslo_vmware.api [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Waiting for the task: (returnval){ [ 1139.021280] env[62627]: value = "task-2195036" [ 1139.021280] env[62627]: _type = "Task" [ 1139.021280] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.030738] env[62627]: DEBUG oslo_vmware.api [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195036, 'name': Rename_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.087486] env[62627]: DEBUG nova.network.neutron [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Successfully updated port: 9830a1ce-942e-4954-8513-f624e13d304c {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1139.251202] env[62627]: DEBUG nova.scheduler.client.report [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1139.320389] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9fcfc15f-cf73-467a-9265-f815b0158b94 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "abb14e3e-b521-4f21-bc94-6bbb66fbee57" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.294s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1139.357666] env[62627]: DEBUG oslo_vmware.api [None req-12ca1cb5-b141-4427-8ba3-db21cf725159 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] 
Task: {'id': task-2195031, 'name': RemoveSnapshot_Task, 'duration_secs': 0.716593} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.357928] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-12ca1cb5-b141-4427-8ba3-db21cf725159 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Deleted Snapshot of the VM instance {{(pid=62627) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1139.358281] env[62627]: INFO nova.compute.manager [None req-12ca1cb5-b141-4427-8ba3-db21cf725159 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Took 16.40 seconds to snapshot the instance on the hypervisor. [ 1139.431511] env[62627]: DEBUG oslo_concurrency.lockutils [None req-02b66e4a-06ca-41b3-82cb-7d2220ca7ca9 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Releasing lock "refresh_cache-84b8381c-bdd4-462e-aa98-0cc972f00882" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1139.432222] env[62627]: DEBUG oslo_concurrency.lockutils [None req-02b66e4a-06ca-41b3-82cb-7d2220ca7ca9 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquiring lock "84b8381c-bdd4-462e-aa98-0cc972f00882" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1139.432358] env[62627]: DEBUG oslo_concurrency.lockutils [None req-02b66e4a-06ca-41b3-82cb-7d2220ca7ca9 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquired lock "84b8381c-bdd4-462e-aa98-0cc972f00882" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1139.432666] env[62627]: DEBUG oslo_concurrency.lockutils [req-f3230f2a-da87-43f0-ae0f-40dc953d346c req-b80baaa9-ab4a-45f5-a7d6-0c9c8b92cb49 service nova] Acquired lock "refresh_cache-84b8381c-bdd4-462e-aa98-0cc972f00882" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1139.432847] env[62627]: DEBUG nova.network.neutron [req-f3230f2a-da87-43f0-ae0f-40dc953d346c req-b80baaa9-ab4a-45f5-a7d6-0c9c8b92cb49 service nova] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Refreshing network info cache for port b8c79070-6650-4427-ab59-c910dccfba54 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1139.435271] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6580cd2f-026a-437c-ac83-a53dba78f092 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.453457] env[62627]: DEBUG nova.virt.hardware [None req-02b66e4a-06ca-41b3-82cb-7d2220ca7ca9 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1139.453697] env[62627]: DEBUG nova.virt.hardware [None req-02b66e4a-06ca-41b3-82cb-7d2220ca7ca9 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1139.453864] env[62627]: DEBUG nova.virt.hardware [None req-02b66e4a-06ca-41b3-82cb-7d2220ca7ca9 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1139.454054] env[62627]: DEBUG nova.virt.hardware [None req-02b66e4a-06ca-41b3-82cb-7d2220ca7ca9 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1139.454205] env[62627]: DEBUG nova.virt.hardware [None req-02b66e4a-06ca-41b3-82cb-7d2220ca7ca9 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1139.454352] env[62627]: DEBUG nova.virt.hardware [None req-02b66e4a-06ca-41b3-82cb-7d2220ca7ca9 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1139.454554] env[62627]: DEBUG nova.virt.hardware [None req-02b66e4a-06ca-41b3-82cb-7d2220ca7ca9 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1139.454731] env[62627]: DEBUG nova.virt.hardware [None req-02b66e4a-06ca-41b3-82cb-7d2220ca7ca9 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1139.454881] env[62627]: DEBUG nova.virt.hardware [None req-02b66e4a-06ca-41b3-82cb-7d2220ca7ca9 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1139.455075] env[62627]: DEBUG nova.virt.hardware [None req-02b66e4a-06ca-41b3-82cb-7d2220ca7ca9 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1139.455265] env[62627]: DEBUG nova.virt.hardware [None req-02b66e4a-06ca-41b3-82cb-7d2220ca7ca9 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Sorted desired 
topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1139.461814] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-02b66e4a-06ca-41b3-82cb-7d2220ca7ca9 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Reconfiguring VM to attach interface {{(pid=62627) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1139.462954] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0b4d1ddc-e655-4215-af80-77261a3ebf0d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.480664] env[62627]: DEBUG oslo_vmware.api [None req-02b66e4a-06ca-41b3-82cb-7d2220ca7ca9 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Waiting for the task: (returnval){ [ 1139.480664] env[62627]: value = "task-2195037" [ 1139.480664] env[62627]: _type = "Task" [ 1139.480664] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.489064] env[62627]: DEBUG oslo_vmware.api [None req-02b66e4a-06ca-41b3-82cb-7d2220ca7ca9 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195037, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.531525] env[62627]: DEBUG oslo_vmware.api [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195036, 'name': Rename_Task, 'duration_secs': 0.286784} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.531808] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1139.532068] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-01b66082-08cb-4ef9-8c48-4d6644cc4d14 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.538984] env[62627]: DEBUG oslo_vmware.api [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Waiting for the task: (returnval){ [ 1139.538984] env[62627]: value = "task-2195038" [ 1139.538984] env[62627]: _type = "Task" [ 1139.538984] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.547599] env[62627]: DEBUG oslo_vmware.api [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195038, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.589565] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquiring lock "refresh_cache-634d547b-3b5e-4185-8229-daf055ca6e69" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1139.589692] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquired lock "refresh_cache-634d547b-3b5e-4185-8229-daf055ca6e69" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1139.589729] env[62627]: DEBUG nova.network.neutron [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1139.756818] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.816s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1139.756818] env[62627]: DEBUG nova.compute.manager [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: 28e42d69-0c19-4e3c-ae49-9076fdc65aa8] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1139.759428] env[62627]: DEBUG oslo_concurrency.lockutils [None req-761becfe-42d0-427d-bb4a-371c297cadf1 tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.199s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1139.759633] env[62627]: DEBUG nova.objects.instance [None req-761becfe-42d0-427d-bb4a-371c297cadf1 tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Lazy-loading 'resources' on Instance uuid b7fd9a2a-1960-4dba-93af-2dd5fe30a90d {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1139.901787] env[62627]: DEBUG nova.compute.manager [None req-12ca1cb5-b141-4427-8ba3-db21cf725159 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Found 1 images (rotation: 2) {{(pid=62627) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 1140.001380] env[62627]: DEBUG oslo_vmware.api [None req-02b66e4a-06ca-41b3-82cb-7d2220ca7ca9 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195037, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.049727] env[62627]: DEBUG oslo_vmware.api [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195038, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.147431] env[62627]: DEBUG nova.network.neutron [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1140.265982] env[62627]: DEBUG nova.compute.utils [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1140.267575] env[62627]: DEBUG nova.compute.manager [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: 28e42d69-0c19-4e3c-ae49-9076fdc65aa8] Allocating IP information in the background. {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1140.267744] env[62627]: DEBUG nova.network.neutron [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: 28e42d69-0c19-4e3c-ae49-9076fdc65aa8] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1140.339120] env[62627]: DEBUG nova.policy [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9acb109fb0c046b2b2e6dfc595dd5c07', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '97d5055e1a674886a161a96b0e095451', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 1140.340000] env[62627]: DEBUG nova.network.neutron [req-f3230f2a-da87-43f0-ae0f-40dc953d346c req-b80baaa9-ab4a-45f5-a7d6-0c9c8b92cb49 service nova] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Updated VIF entry in instance network info cache for port b8c79070-6650-4427-ab59-c910dccfba54. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1140.340534] env[62627]: DEBUG nova.network.neutron [req-f3230f2a-da87-43f0-ae0f-40dc953d346c req-b80baaa9-ab4a-45f5-a7d6-0c9c8b92cb49 service nova] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Updating instance_info_cache with network_info: [{"id": "4c3f3405-39a4-4c22-9c26-bdd0ab342c29", "address": "fa:16:3e:05:76:ef", "network": {"id": "a1c5c227-8f1f-4881-a6d8-f446b64d9846", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1052922661-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.243", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e4f7ad3ebcf4c4d97bdbce58eec5a09", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "023d6500-887e-4dc4-bec5-06b40450d9c0", "external-id": "nsx-vlan-transportzone-108", "segmentation_id": 108, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c3f3405-39", "ovs_interfaceid": "4c3f3405-39a4-4c22-9c26-bdd0ab342c29", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "b8c79070-6650-4427-ab59-c910dccfba54", "address": "fa:16:3e:08:8f:ca", "network": {"id": "a1c5c227-8f1f-4881-a6d8-f446b64d9846", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1052922661-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e4f7ad3ebcf4c4d97bdbce58eec5a09", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "023d6500-887e-4dc4-bec5-06b40450d9c0", "external-id": "nsx-vlan-transportzone-108", "segmentation_id": 108, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8c79070-66", "ovs_interfaceid": "b8c79070-6650-4427-ab59-c910dccfba54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1140.497240] env[62627]: DEBUG oslo_vmware.api [None req-02b66e4a-06ca-41b3-82cb-7d2220ca7ca9 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195037, 'name': ReconfigVM_Task, 'duration_secs': 0.825125} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.501090] env[62627]: DEBUG oslo_concurrency.lockutils [None req-02b66e4a-06ca-41b3-82cb-7d2220ca7ca9 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Releasing lock "84b8381c-bdd4-462e-aa98-0cc972f00882" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1140.501320] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-02b66e4a-06ca-41b3-82cb-7d2220ca7ca9 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Reconfigured VM to attach interface {{(pid=62627) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1140.505282] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ba47d5df-8aec-4758-93a2-2e27eea4eb86 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Acquiring lock "f6357f01-d7cb-4477-be2d-98fc17d7a940" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1140.505492] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ba47d5df-8aec-4758-93a2-2e27eea4eb86 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Lock "f6357f01-d7cb-4477-be2d-98fc17d7a940" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1140.505683] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ba47d5df-8aec-4758-93a2-2e27eea4eb86 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Acquiring lock "f6357f01-d7cb-4477-be2d-98fc17d7a940-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1140.505863] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ba47d5df-8aec-4758-93a2-2e27eea4eb86 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Lock "f6357f01-d7cb-4477-be2d-98fc17d7a940-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1140.507576] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ba47d5df-8aec-4758-93a2-2e27eea4eb86 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Lock "f6357f01-d7cb-4477-be2d-98fc17d7a940-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1140.510736] env[62627]: INFO nova.compute.manager [None req-ba47d5df-8aec-4758-93a2-2e27eea4eb86 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: f6357f01-d7cb-4477-be2d-98fc17d7a940] Terminating 
instance [ 1140.514703] env[62627]: DEBUG nova.compute.manager [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Stashing vm_state: active {{(pid=62627) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1140.545231] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cb13638-eeb6-4ce8-bb40-7f481beab6a4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.552920] env[62627]: DEBUG oslo_vmware.api [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195038, 'name': PowerOnVM_Task, 'duration_secs': 0.573538} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.554822] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1140.555049] env[62627]: INFO nova.compute.manager [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] Took 12.11 seconds to spawn the instance on the hypervisor. [ 1140.555233] env[62627]: DEBUG nova.compute.manager [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1140.556050] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9548244-86db-493c-a77a-99151cfd87a9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.559266] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-388d60ad-453f-4b37-8340-d48e4895c224 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.595776] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14a66833-07bc-4ec6-81ea-acc95427fc8a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.603721] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6be306e-7825-4399-b8b6-73cc162c85e2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.618517] env[62627]: DEBUG nova.compute.provider_tree [None req-761becfe-42d0-427d-bb4a-371c297cadf1 tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1140.641816] env[62627]: 
DEBUG nova.network.neutron [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: 28e42d69-0c19-4e3c-ae49-9076fdc65aa8] Successfully created port: 37591ab2-0826-44b0-99ae-03059115c799 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1140.644761] env[62627]: DEBUG nova.network.neutron [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Updating instance_info_cache with network_info: [{"id": "9830a1ce-942e-4954-8513-f624e13d304c", "address": "fa:16:3e:bb:02:dc", "network": {"id": "f5de376b-cb15-4145-90b2-664ca4fa638e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1451402594-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74aed70194cf444884768a5c89fe8613", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "098df9b7-d759-47f7-b756-334848cb423b", "external-id": "nsx-vlan-transportzone-765", "segmentation_id": 765, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9830a1ce-94", "ovs_interfaceid": "9830a1ce-942e-4954-8513-f624e13d304c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1140.774929] env[62627]: DEBUG nova.compute.manager [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: 28e42d69-0c19-4e3c-ae49-9076fdc65aa8] Start building block device mappings for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1140.832427] env[62627]: DEBUG nova.compute.manager [req-a03fddfb-e046-4c5f-8ecd-2285692b911f req-3045ab86-c749-4b85-83d8-a4cd40a2b228 service nova] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Received event network-vif-plugged-9830a1ce-942e-4954-8513-f624e13d304c {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1140.832427] env[62627]: DEBUG oslo_concurrency.lockutils [req-a03fddfb-e046-4c5f-8ecd-2285692b911f req-3045ab86-c749-4b85-83d8-a4cd40a2b228 service nova] Acquiring lock "634d547b-3b5e-4185-8229-daf055ca6e69-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1140.832563] env[62627]: DEBUG oslo_concurrency.lockutils [req-a03fddfb-e046-4c5f-8ecd-2285692b911f req-3045ab86-c749-4b85-83d8-a4cd40a2b228 service nova] Lock "634d547b-3b5e-4185-8229-daf055ca6e69-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1140.832858] env[62627]: DEBUG oslo_concurrency.lockutils [req-a03fddfb-e046-4c5f-8ecd-2285692b911f req-3045ab86-c749-4b85-83d8-a4cd40a2b228 service nova] Lock "634d547b-3b5e-4185-8229-daf055ca6e69-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1140.833192] env[62627]: DEBUG nova.compute.manager [req-a03fddfb-e046-4c5f-8ecd-2285692b911f req-3045ab86-c749-4b85-83d8-a4cd40a2b228 service nova] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] No waiting events found dispatching network-vif-plugged-9830a1ce-942e-4954-8513-f624e13d304c {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1140.834068] env[62627]: WARNING nova.compute.manager [req-a03fddfb-e046-4c5f-8ecd-2285692b911f req-3045ab86-c749-4b85-83d8-a4cd40a2b228 service nova] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Received unexpected event network-vif-plugged-9830a1ce-942e-4954-8513-f624e13d304c for instance with vm_state building and task_state spawning. [ 1140.834068] env[62627]: DEBUG nova.compute.manager [req-a03fddfb-e046-4c5f-8ecd-2285692b911f req-3045ab86-c749-4b85-83d8-a4cd40a2b228 service nova] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Received event network-changed-9830a1ce-942e-4954-8513-f624e13d304c {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1140.834068] env[62627]: DEBUG nova.compute.manager [req-a03fddfb-e046-4c5f-8ecd-2285692b911f req-3045ab86-c749-4b85-83d8-a4cd40a2b228 service nova] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Refreshing instance network info cache due to event network-changed-9830a1ce-942e-4954-8513-f624e13d304c. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1140.834068] env[62627]: DEBUG oslo_concurrency.lockutils [req-a03fddfb-e046-4c5f-8ecd-2285692b911f req-3045ab86-c749-4b85-83d8-a4cd40a2b228 service nova] Acquiring lock "refresh_cache-634d547b-3b5e-4185-8229-daf055ca6e69" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1140.846305] env[62627]: DEBUG oslo_concurrency.lockutils [req-f3230f2a-da87-43f0-ae0f-40dc953d346c req-b80baaa9-ab4a-45f5-a7d6-0c9c8b92cb49 service nova] Releasing lock "refresh_cache-84b8381c-bdd4-462e-aa98-0cc972f00882" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1141.012451] env[62627]: DEBUG oslo_concurrency.lockutils [None req-02b66e4a-06ca-41b3-82cb-7d2220ca7ca9 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lock "interface-84b8381c-bdd4-462e-aa98-0cc972f00882-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.346s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1141.023196] env[62627]: DEBUG nova.compute.manager [None req-ba47d5df-8aec-4758-93a2-2e27eea4eb86 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: f6357f01-d7cb-4477-be2d-98fc17d7a940] Start destroying the instance on the hypervisor. {{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1141.023486] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ba47d5df-8aec-4758-93a2-2e27eea4eb86 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: f6357f01-d7cb-4477-be2d-98fc17d7a940] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1141.024399] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-272e9883-cad6-42f9-97f3-b9cc55a4d43d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.032747] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba47d5df-8aec-4758-93a2-2e27eea4eb86 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: f6357f01-d7cb-4477-be2d-98fc17d7a940] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1141.033075] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e5e48d57-f9e6-4e61-a475-cf26e5b615a8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.036715] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1141.040652] env[62627]: DEBUG oslo_vmware.api [None req-ba47d5df-8aec-4758-93a2-2e27eea4eb86 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] 
Waiting for the task: (returnval){ [ 1141.040652] env[62627]: value = "task-2195039" [ 1141.040652] env[62627]: _type = "Task" [ 1141.040652] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1141.047305] env[62627]: DEBUG oslo_vmware.api [None req-ba47d5df-8aec-4758-93a2-2e27eea4eb86 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Task: {'id': task-2195039, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.102623] env[62627]: INFO nova.compute.manager [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] Took 23.04 seconds to build instance. [ 1141.123009] env[62627]: DEBUG nova.scheduler.client.report [None req-761becfe-42d0-427d-bb4a-371c297cadf1 tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1141.148032] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Releasing lock "refresh_cache-634d547b-3b5e-4185-8229-daf055ca6e69" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1141.148405] env[62627]: DEBUG nova.compute.manager [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Instance network_info: |[{"id": "9830a1ce-942e-4954-8513-f624e13d304c", "address": "fa:16:3e:bb:02:dc", "network": {"id": "f5de376b-cb15-4145-90b2-664ca4fa638e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1451402594-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74aed70194cf444884768a5c89fe8613", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "098df9b7-d759-47f7-b756-334848cb423b", "external-id": "nsx-vlan-transportzone-765", "segmentation_id": 765, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9830a1ce-94", "ovs_interfaceid": "9830a1ce-942e-4954-8513-f624e13d304c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| 
{{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1141.148803] env[62627]: DEBUG oslo_concurrency.lockutils [req-a03fddfb-e046-4c5f-8ecd-2285692b911f req-3045ab86-c749-4b85-83d8-a4cd40a2b228 service nova] Acquired lock "refresh_cache-634d547b-3b5e-4185-8229-daf055ca6e69" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1141.149142] env[62627]: DEBUG nova.network.neutron [req-a03fddfb-e046-4c5f-8ecd-2285692b911f req-3045ab86-c749-4b85-83d8-a4cd40a2b228 service nova] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Refreshing network info cache for port 9830a1ce-942e-4954-8513-f624e13d304c {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1141.150375] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bb:02:dc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '098df9b7-d759-47f7-b756-334848cb423b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9830a1ce-942e-4954-8513-f624e13d304c', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1141.158210] env[62627]: DEBUG oslo.service.loopingcall [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1141.159251] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1141.159511] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-64f9956e-ac70-4c8e-bdbb-74c5e27a3b65 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.181360] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1141.181360] env[62627]: value = "task-2195040" [ 1141.181360] env[62627]: _type = "Task" [ 1141.181360] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1141.191735] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195040, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.442243] env[62627]: DEBUG oslo_concurrency.lockutils [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Acquiring lock "367a83b9-79f8-43b2-91dd-40d13193fa40" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1141.442542] env[62627]: DEBUG oslo_concurrency.lockutils [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Lock "367a83b9-79f8-43b2-91dd-40d13193fa40" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1141.551492] env[62627]: DEBUG oslo_vmware.api [None req-ba47d5df-8aec-4758-93a2-2e27eea4eb86 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Task: {'id': task-2195039, 'name': PowerOffVM_Task, 'duration_secs': 0.241933} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1141.551974] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba47d5df-8aec-4758-93a2-2e27eea4eb86 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: f6357f01-d7cb-4477-be2d-98fc17d7a940] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1141.552308] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ba47d5df-8aec-4758-93a2-2e27eea4eb86 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: f6357f01-d7cb-4477-be2d-98fc17d7a940] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1141.552711] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ea5963db-a926-410a-8179-1c0bc281ec1d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.605407] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e128358a-b5ef-4890-baba-79a87fa7ecd0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lock "3001147e-7be3-4f72-b330-8f99bf7cf3ea" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.550s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1141.628493] env[62627]: DEBUG oslo_concurrency.lockutils [None req-761becfe-42d0-427d-bb4a-371c297cadf1 tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.869s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1141.631030] env[62627]: DEBUG oslo_concurrency.lockutils [None 
req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.594s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1141.653997] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ba47d5df-8aec-4758-93a2-2e27eea4eb86 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: f6357f01-d7cb-4477-be2d-98fc17d7a940] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1141.654142] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ba47d5df-8aec-4758-93a2-2e27eea4eb86 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: f6357f01-d7cb-4477-be2d-98fc17d7a940] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1141.654324] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba47d5df-8aec-4758-93a2-2e27eea4eb86 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Deleting the datastore file [datastore2] f6357f01-d7cb-4477-be2d-98fc17d7a940 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1141.654660] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6838cfa7-cf27-48f1-bd86-f1d53cf45895 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.657827] env[62627]: INFO nova.scheduler.client.report [None req-761becfe-42d0-427d-bb4a-371c297cadf1 tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Deleted allocations for instance b7fd9a2a-1960-4dba-93af-2dd5fe30a90d [ 1141.668138] env[62627]: DEBUG oslo_vmware.api [None req-ba47d5df-8aec-4758-93a2-2e27eea4eb86 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Waiting for the task: (returnval){ [ 1141.668138] env[62627]: value = "task-2195043" [ 1141.668138] env[62627]: _type = "Task" [ 1141.668138] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1141.676025] env[62627]: DEBUG oslo_vmware.api [None req-ba47d5df-8aec-4758-93a2-2e27eea4eb86 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Task: {'id': task-2195043, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.689911] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195040, 'name': CreateVM_Task} progress is 25%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.789536] env[62627]: DEBUG nova.compute.manager [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: 28e42d69-0c19-4e3c-ae49-9076fdc65aa8] Start spawning the instance on the hypervisor. 
{{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1141.819027] env[62627]: DEBUG nova.virt.hardware [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1141.819295] env[62627]: DEBUG nova.virt.hardware [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1141.819450] env[62627]: DEBUG nova.virt.hardware [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1141.819633] env[62627]: DEBUG nova.virt.hardware [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1141.819779] env[62627]: DEBUG nova.virt.hardware [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1141.819927] env[62627]: DEBUG nova.virt.hardware [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1141.820209] env[62627]: DEBUG nova.virt.hardware [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1141.820385] env[62627]: DEBUG nova.virt.hardware [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1141.820554] env[62627]: DEBUG 
nova.virt.hardware [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1141.820770] env[62627]: DEBUG nova.virt.hardware [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1141.820942] env[62627]: DEBUG nova.virt.hardware [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1141.821812] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6605fab7-a5c6-44e4-aacf-36787212ef80 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.832289] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fac60f4-06d1-498e-8efe-a8f8d37f3f68 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.944575] env[62627]: DEBUG nova.network.neutron [req-a03fddfb-e046-4c5f-8ecd-2285692b911f req-3045ab86-c749-4b85-83d8-a4cd40a2b228 service nova] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Updated VIF entry in instance network info cache for port 9830a1ce-942e-4954-8513-f624e13d304c. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1141.944996] env[62627]: DEBUG nova.network.neutron [req-a03fddfb-e046-4c5f-8ecd-2285692b911f req-3045ab86-c749-4b85-83d8-a4cd40a2b228 service nova] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Updating instance_info_cache with network_info: [{"id": "9830a1ce-942e-4954-8513-f624e13d304c", "address": "fa:16:3e:bb:02:dc", "network": {"id": "f5de376b-cb15-4145-90b2-664ca4fa638e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1451402594-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74aed70194cf444884768a5c89fe8613", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "098df9b7-d759-47f7-b756-334848cb423b", "external-id": "nsx-vlan-transportzone-765", "segmentation_id": 765, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9830a1ce-94", "ovs_interfaceid": "9830a1ce-942e-4954-8513-f624e13d304c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1141.948287] env[62627]: DEBUG nova.compute.manager [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: 367a83b9-79f8-43b2-91dd-40d13193fa40] Starting instance... 
{{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1142.120040] env[62627]: DEBUG nova.compute.manager [req-9be82ff3-f2b1-45ea-87e7-1310e3aa7500 req-d8414504-c436-455e-ac32-d00be838d36f service nova] [instance: 28e42d69-0c19-4e3c-ae49-9076fdc65aa8] Received event network-vif-plugged-37591ab2-0826-44b0-99ae-03059115c799 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1142.120490] env[62627]: DEBUG oslo_concurrency.lockutils [req-9be82ff3-f2b1-45ea-87e7-1310e3aa7500 req-d8414504-c436-455e-ac32-d00be838d36f service nova] Acquiring lock "28e42d69-0c19-4e3c-ae49-9076fdc65aa8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1142.121182] env[62627]: DEBUG oslo_concurrency.lockutils [req-9be82ff3-f2b1-45ea-87e7-1310e3aa7500 req-d8414504-c436-455e-ac32-d00be838d36f service nova] Lock "28e42d69-0c19-4e3c-ae49-9076fdc65aa8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1142.121182] env[62627]: DEBUG oslo_concurrency.lockutils [req-9be82ff3-f2b1-45ea-87e7-1310e3aa7500 req-d8414504-c436-455e-ac32-d00be838d36f service nova] Lock "28e42d69-0c19-4e3c-ae49-9076fdc65aa8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1142.121182] env[62627]: DEBUG nova.compute.manager [req-9be82ff3-f2b1-45ea-87e7-1310e3aa7500 req-d8414504-c436-455e-ac32-d00be838d36f service nova] [instance: 28e42d69-0c19-4e3c-ae49-9076fdc65aa8] No waiting events found dispatching network-vif-plugged-37591ab2-0826-44b0-99ae-03059115c799 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1142.121360] env[62627]: WARNING nova.compute.manager [req-9be82ff3-f2b1-45ea-87e7-1310e3aa7500 req-d8414504-c436-455e-ac32-d00be838d36f service nova] [instance: 28e42d69-0c19-4e3c-ae49-9076fdc65aa8] Received unexpected event network-vif-plugged-37591ab2-0826-44b0-99ae-03059115c799 for instance with vm_state building and task_state spawning. [ 1142.135950] env[62627]: INFO nova.compute.claims [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1142.174635] env[62627]: DEBUG oslo_concurrency.lockutils [None req-761becfe-42d0-427d-bb4a-371c297cadf1 tempest-ServerTagsTestJSON-1489180910 tempest-ServerTagsTestJSON-1489180910-project-member] Lock "b7fd9a2a-1960-4dba-93af-2dd5fe30a90d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.120s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1142.184052] env[62627]: DEBUG oslo_vmware.api [None req-ba47d5df-8aec-4758-93a2-2e27eea4eb86 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Task: {'id': task-2195043, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.195416] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195040, 'name': CreateVM_Task, 'duration_secs': 0.780993} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.196230] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1142.197380] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1142.197634] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1142.198124] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1142.198968] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-db05c671-5f3c-43dc-8744-66258e9c50b1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.208025] env[62627]: DEBUG oslo_vmware.api [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1142.208025] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]523ad080-ab79-e80d-eecd-b13f3df26239" [ 1142.208025] env[62627]: _type = "Task" [ 1142.208025] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.216972] env[62627]: DEBUG oslo_vmware.api [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]523ad080-ab79-e80d-eecd-b13f3df26239, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.232860] env[62627]: DEBUG nova.network.neutron [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: 28e42d69-0c19-4e3c-ae49-9076fdc65aa8] Successfully updated port: 37591ab2-0826-44b0-99ae-03059115c799 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1142.360022] env[62627]: DEBUG nova.compute.manager [None req-d0b2807c-3680-4f24-9c65-330d4dce8de2 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1142.360022] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b9b55f1-b737-4919-86af-788f1a5f9d74 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.448149] env[62627]: DEBUG oslo_concurrency.lockutils [req-a03fddfb-e046-4c5f-8ecd-2285692b911f req-3045ab86-c749-4b85-83d8-a4cd40a2b228 service nova] Releasing lock "refresh_cache-634d547b-3b5e-4185-8229-daf055ca6e69" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1142.505476] env[62627]: DEBUG oslo_concurrency.lockutils [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1142.643872] env[62627]: INFO nova.compute.resource_tracker [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Updating resource usage from migration eb241675-c37b-49bb-9c2a-1c17acedf01d [ 1142.682440] env[62627]: DEBUG oslo_vmware.api [None req-ba47d5df-8aec-4758-93a2-2e27eea4eb86 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Task: {'id': task-2195043, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.549709} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.682561] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba47d5df-8aec-4758-93a2-2e27eea4eb86 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1142.682708] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ba47d5df-8aec-4758-93a2-2e27eea4eb86 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: f6357f01-d7cb-4477-be2d-98fc17d7a940] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1142.682891] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ba47d5df-8aec-4758-93a2-2e27eea4eb86 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: f6357f01-d7cb-4477-be2d-98fc17d7a940] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1142.686519] env[62627]: INFO nova.compute.manager [None req-ba47d5df-8aec-4758-93a2-2e27eea4eb86 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: f6357f01-d7cb-4477-be2d-98fc17d7a940] Took 1.66 seconds to destroy the instance on the hypervisor. [ 1142.686915] env[62627]: DEBUG oslo.service.loopingcall [None req-ba47d5df-8aec-4758-93a2-2e27eea4eb86 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1142.687105] env[62627]: DEBUG nova.compute.manager [-] [instance: f6357f01-d7cb-4477-be2d-98fc17d7a940] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1142.687211] env[62627]: DEBUG nova.network.neutron [-] [instance: f6357f01-d7cb-4477-be2d-98fc17d7a940] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1142.724919] env[62627]: DEBUG oslo_vmware.api [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]523ad080-ab79-e80d-eecd-b13f3df26239, 'name': SearchDatastore_Task, 'duration_secs': 0.013359} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.731668] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1142.731940] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1142.732397] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1142.732572] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1142.732756] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1142.733960] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1ea6d6b7-3195-4a4c-a697-9adcb3e7d70e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.739265] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Acquiring lock "refresh_cache-28e42d69-0c19-4e3c-ae49-9076fdc65aa8" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1142.739454] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Acquired lock "refresh_cache-28e42d69-0c19-4e3c-ae49-9076fdc65aa8" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1142.739547] env[62627]: DEBUG nova.network.neutron [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: 28e42d69-0c19-4e3c-ae49-9076fdc65aa8] Building network info cache for instance {{(pid=62627) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2067}} [ 1142.748870] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1142.749135] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1142.750069] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e990850b-3e2f-47ea-b132-6d230432f40b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.765405] env[62627]: DEBUG oslo_vmware.api [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1142.765405] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]521123a9-e8f1-2fd7-825b-faa9983d2869" [ 1142.765405] env[62627]: _type = "Task" [ 1142.765405] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.784383] env[62627]: DEBUG oslo_vmware.api [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]521123a9-e8f1-2fd7-825b-faa9983d2869, 'name': SearchDatastore_Task, 'duration_secs': 0.011474} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.789962] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-caba0bd8-eb85-4064-998e-e862a1e788d8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.800813] env[62627]: DEBUG oslo_vmware.api [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1142.800813] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52b557f1-c556-17f8-96da-eea53ae46ed3" [ 1142.800813] env[62627]: _type = "Task" [ 1142.800813] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.818143] env[62627]: DEBUG oslo_vmware.api [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52b557f1-c556-17f8-96da-eea53ae46ed3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.860185] env[62627]: DEBUG nova.compute.manager [req-605ab587-17fe-48cc-8f21-e04a6ed1d36a req-b96b9da3-7a3d-427f-9acc-61769c562201 service nova] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] Received event network-changed-9db88239-0a75-4952-8e09-6a6636c9ecb0 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1142.860520] env[62627]: DEBUG nova.compute.manager [req-605ab587-17fe-48cc-8f21-e04a6ed1d36a req-b96b9da3-7a3d-427f-9acc-61769c562201 service nova] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] Refreshing instance network info cache due to event network-changed-9db88239-0a75-4952-8e09-6a6636c9ecb0. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1142.860853] env[62627]: DEBUG oslo_concurrency.lockutils [req-605ab587-17fe-48cc-8f21-e04a6ed1d36a req-b96b9da3-7a3d-427f-9acc-61769c562201 service nova] Acquiring lock "refresh_cache-3001147e-7be3-4f72-b330-8f99bf7cf3ea" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1142.861110] env[62627]: DEBUG oslo_concurrency.lockutils [req-605ab587-17fe-48cc-8f21-e04a6ed1d36a req-b96b9da3-7a3d-427f-9acc-61769c562201 service nova] Acquired lock "refresh_cache-3001147e-7be3-4f72-b330-8f99bf7cf3ea" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1142.861376] env[62627]: DEBUG nova.network.neutron [req-605ab587-17fe-48cc-8f21-e04a6ed1d36a req-b96b9da3-7a3d-427f-9acc-61769c562201 service nova] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] Refreshing network info cache for port 9db88239-0a75-4952-8e09-6a6636c9ecb0 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1142.870762] env[62627]: INFO nova.compute.manager [None req-d0b2807c-3680-4f24-9c65-330d4dce8de2 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] instance snapshotting [ 1142.873062] env[62627]: DEBUG nova.objects.instance [None req-d0b2807c-3680-4f24-9c65-330d4dce8de2 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lazy-loading 'flavor' on Instance uuid d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1142.951773] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e916b15-5a98-45f7-a6db-90e0a83ffe81 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.960925] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a23f102-932c-4dd6-bd0d-3df0f799cc00 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.993550] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6bc0b7d-b316-4c4d-ad27-091743f5b91e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.002316] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c2f3905-4784-4d0f-8d3c-f7b5c963e459 {{(pid=62627) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.017908] env[62627]: DEBUG nova.compute.provider_tree [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1143.232733] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b1c70210-0f0c-4d59-9eaf-fb6af369ecab tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquiring lock "interface-84b8381c-bdd4-462e-aa98-0cc972f00882-aa0a7b01-9a61-4e32-82cb-8f849bff6266" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1143.232993] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b1c70210-0f0c-4d59-9eaf-fb6af369ecab tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lock "interface-84b8381c-bdd4-462e-aa98-0cc972f00882-aa0a7b01-9a61-4e32-82cb-8f849bff6266" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1143.233401] env[62627]: DEBUG nova.objects.instance [None req-b1c70210-0f0c-4d59-9eaf-fb6af369ecab tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lazy-loading 'flavor' on Instance uuid 84b8381c-bdd4-462e-aa98-0cc972f00882 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1143.276445] env[62627]: DEBUG nova.network.neutron [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: 28e42d69-0c19-4e3c-ae49-9076fdc65aa8] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1143.326343] env[62627]: DEBUG oslo_vmware.api [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52b557f1-c556-17f8-96da-eea53ae46ed3, 'name': SearchDatastore_Task, 'duration_secs': 0.016649} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.326808] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1143.327110] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 634d547b-3b5e-4185-8229-daf055ca6e69/634d547b-3b5e-4185-8229-daf055ca6e69.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1143.327379] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a1907476-93de-4404-8404-c97c9e4bf7ba {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.335345] env[62627]: DEBUG oslo_vmware.api [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1143.335345] env[62627]: value = "task-2195044" [ 1143.335345] env[62627]: _type = "Task" [ 1143.335345] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.346161] env[62627]: DEBUG oslo_vmware.api [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195044, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.380106] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bbc2927-d7ce-4dfa-a887-0c4fb437d164 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.401068] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db02e2ae-f0ee-49bb-b970-b9758fcd5e1b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.506794] env[62627]: DEBUG nova.network.neutron [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: 28e42d69-0c19-4e3c-ae49-9076fdc65aa8] Updating instance_info_cache with network_info: [{"id": "37591ab2-0826-44b0-99ae-03059115c799", "address": "fa:16:3e:b8:7c:66", "network": {"id": "280614ef-fbde-41d9-927f-7471cc50aa14", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-970093695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97d5055e1a674886a161a96b0e095451", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8233b50c-be21-441a-a357-a29138a95b8b", "external-id": "nsx-vlan-transportzone-496", "segmentation_id": 496, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap37591ab2-08", "ovs_interfaceid": "37591ab2-0826-44b0-99ae-03059115c799", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1143.521562] env[62627]: DEBUG nova.scheduler.client.report [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1143.677264] env[62627]: DEBUG nova.network.neutron [req-605ab587-17fe-48cc-8f21-e04a6ed1d36a req-b96b9da3-7a3d-427f-9acc-61769c562201 service nova] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] Updated VIF entry in instance network info cache for port 9db88239-0a75-4952-8e09-6a6636c9ecb0. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1143.677728] env[62627]: DEBUG nova.network.neutron [req-605ab587-17fe-48cc-8f21-e04a6ed1d36a req-b96b9da3-7a3d-427f-9acc-61769c562201 service nova] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] Updating instance_info_cache with network_info: [{"id": "9db88239-0a75-4952-8e09-6a6636c9ecb0", "address": "fa:16:3e:70:de:4a", "network": {"id": "76eae841-5dda-4384-92ba-55247808e11f", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1004379592-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.220", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f41b0c171180469e9092b8a8da697a78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9db88239-0a", "ovs_interfaceid": "9db88239-0a75-4952-8e09-6a6636c9ecb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1143.835237] env[62627]: DEBUG nova.objects.instance [None req-b1c70210-0f0c-4d59-9eaf-fb6af369ecab tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lazy-loading 'pci_requests' on Instance uuid 84b8381c-bdd4-462e-aa98-0cc972f00882 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1143.847542] env[62627]: DEBUG oslo_vmware.api [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195044, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.913451] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d0b2807c-3680-4f24-9c65-330d4dce8de2 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Creating Snapshot of the VM instance {{(pid=62627) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1143.914657] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-6a027ef5-6294-4e44-98c4-2071eff2572b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.925054] env[62627]: DEBUG oslo_vmware.api [None req-d0b2807c-3680-4f24-9c65-330d4dce8de2 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1143.925054] env[62627]: value = "task-2195046" [ 1143.925054] env[62627]: _type = "Task" [ 1143.925054] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.935747] env[62627]: DEBUG oslo_vmware.api [None req-d0b2807c-3680-4f24-9c65-330d4dce8de2 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195046, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.010479] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Releasing lock "refresh_cache-28e42d69-0c19-4e3c-ae49-9076fdc65aa8" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1144.010865] env[62627]: DEBUG nova.compute.manager [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: 28e42d69-0c19-4e3c-ae49-9076fdc65aa8] Instance network_info: |[{"id": "37591ab2-0826-44b0-99ae-03059115c799", "address": "fa:16:3e:b8:7c:66", "network": {"id": "280614ef-fbde-41d9-927f-7471cc50aa14", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-970093695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97d5055e1a674886a161a96b0e095451", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8233b50c-be21-441a-a357-a29138a95b8b", "external-id": "nsx-vlan-transportzone-496", "segmentation_id": 496, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap37591ab2-08", "ovs_interfaceid": "37591ab2-0826-44b0-99ae-03059115c799", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1144.011348] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: 28e42d69-0c19-4e3c-ae49-9076fdc65aa8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b8:7c:66', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8233b50c-be21-441a-a357-a29138a95b8b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '37591ab2-0826-44b0-99ae-03059115c799', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1144.020046] env[62627]: DEBUG oslo.service.loopingcall [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1144.020362] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 28e42d69-0c19-4e3c-ae49-9076fdc65aa8] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1144.021140] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-778d5d8e-0f65-4530-b0b9-88af033765d7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.037702] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.407s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1144.038044] env[62627]: INFO nova.compute.manager [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Migrating [ 1144.045737] env[62627]: DEBUG oslo_concurrency.lockutils [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.540s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1144.048996] env[62627]: INFO nova.compute.claims [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: 367a83b9-79f8-43b2-91dd-40d13193fa40] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1144.067056] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1144.067056] env[62627]: value = "task-2195047" [ 1144.067056] env[62627]: _type = "Task" [ 1144.067056] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.080724] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195047, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.180892] env[62627]: DEBUG oslo_concurrency.lockutils [req-605ab587-17fe-48cc-8f21-e04a6ed1d36a req-b96b9da3-7a3d-427f-9acc-61769c562201 service nova] Releasing lock "refresh_cache-3001147e-7be3-4f72-b330-8f99bf7cf3ea" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1144.286198] env[62627]: DEBUG nova.compute.manager [req-fd0fa8aa-9aad-4506-9c4a-060b771e60d3 req-8ed2f18c-3e5a-4152-9482-8d4c17052df0 service nova] [instance: 28e42d69-0c19-4e3c-ae49-9076fdc65aa8] Received event network-changed-37591ab2-0826-44b0-99ae-03059115c799 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1144.286534] env[62627]: DEBUG nova.compute.manager [req-fd0fa8aa-9aad-4506-9c4a-060b771e60d3 req-8ed2f18c-3e5a-4152-9482-8d4c17052df0 service nova] [instance: 28e42d69-0c19-4e3c-ae49-9076fdc65aa8] Refreshing instance network info cache due to event network-changed-37591ab2-0826-44b0-99ae-03059115c799. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1144.286690] env[62627]: DEBUG oslo_concurrency.lockutils [req-fd0fa8aa-9aad-4506-9c4a-060b771e60d3 req-8ed2f18c-3e5a-4152-9482-8d4c17052df0 service nova] Acquiring lock "refresh_cache-28e42d69-0c19-4e3c-ae49-9076fdc65aa8" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1144.287876] env[62627]: DEBUG oslo_concurrency.lockutils [req-fd0fa8aa-9aad-4506-9c4a-060b771e60d3 req-8ed2f18c-3e5a-4152-9482-8d4c17052df0 service nova] Acquired lock "refresh_cache-28e42d69-0c19-4e3c-ae49-9076fdc65aa8" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1144.288275] env[62627]: DEBUG nova.network.neutron [req-fd0fa8aa-9aad-4506-9c4a-060b771e60d3 req-8ed2f18c-3e5a-4152-9482-8d4c17052df0 service nova] [instance: 28e42d69-0c19-4e3c-ae49-9076fdc65aa8] Refreshing network info cache for port 37591ab2-0826-44b0-99ae-03059115c799 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1144.345619] env[62627]: DEBUG nova.objects.base [None req-b1c70210-0f0c-4d59-9eaf-fb6af369ecab tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Object Instance<84b8381c-bdd4-462e-aa98-0cc972f00882> lazy-loaded attributes: flavor,pci_requests {{(pid=62627) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1144.345619] env[62627]: DEBUG nova.network.neutron [None req-b1c70210-0f0c-4d59-9eaf-fb6af369ecab tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1144.351297] env[62627]: DEBUG nova.network.neutron [-] [instance: f6357f01-d7cb-4477-be2d-98fc17d7a940] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1144.352774] env[62627]: DEBUG oslo_vmware.api [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195044, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.005595} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.354601] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 634d547b-3b5e-4185-8229-daf055ca6e69/634d547b-3b5e-4185-8229-daf055ca6e69.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1144.354601] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1144.356722] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f2ea0187-4833-4daf-88e3-53945a109830 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.373265] env[62627]: DEBUG oslo_vmware.api [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1144.373265] env[62627]: value = "task-2195048" [ 1144.373265] env[62627]: _type = "Task" [ 1144.373265] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.384905] env[62627]: DEBUG oslo_vmware.api [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195048, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.436243] env[62627]: DEBUG oslo_vmware.api [None req-d0b2807c-3680-4f24-9c65-330d4dce8de2 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195046, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.465621] env[62627]: DEBUG nova.policy [None req-b1c70210-0f0c-4d59-9eaf-fb6af369ecab tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3c5e5eee141e496db2d2ea316c43e6be', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8e4f7ad3ebcf4c4d97bdbce58eec5a09', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 1144.567562] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquiring lock "refresh_cache-abb14e3e-b521-4f21-bc94-6bbb66fbee57" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1144.567562] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquired lock "refresh_cache-abb14e3e-b521-4f21-bc94-6bbb66fbee57" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1144.567562] env[62627]: DEBUG nova.network.neutron [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1144.587672] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195047, 'name': CreateVM_Task} progress is 25%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.855429] env[62627]: INFO nova.compute.manager [-] [instance: f6357f01-d7cb-4477-be2d-98fc17d7a940] Took 2.17 seconds to deallocate network for instance. 
[ 1144.867789] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fcd4685-1a84-4666-b933-0136a997121a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquiring lock "e9d33611-e373-4303-937d-b43d893e51f5" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1144.867789] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fcd4685-1a84-4666-b933-0136a997121a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lock "e9d33611-e373-4303-937d-b43d893e51f5" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1144.867789] env[62627]: INFO nova.compute.manager [None req-1fcd4685-1a84-4666-b933-0136a997121a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Rebooting instance [ 1144.884784] env[62627]: DEBUG oslo_vmware.api [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195048, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.18899} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.885083] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1144.886050] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fb4718a-97f0-4a24-9e1a-14697a7dc1f9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.912362] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Reconfiguring VM instance instance-0000005b to attach disk [datastore2] 634d547b-3b5e-4185-8229-daf055ca6e69/634d547b-3b5e-4185-8229-daf055ca6e69.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1144.915180] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8abe19af-b73b-477b-b1d2-0d9aff0a524e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.940265] env[62627]: DEBUG oslo_vmware.api [None req-d0b2807c-3680-4f24-9c65-330d4dce8de2 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195046, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.943368] env[62627]: DEBUG oslo_vmware.api [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1144.943368] env[62627]: value = "task-2195049" [ 1144.943368] env[62627]: _type = "Task" [ 1144.943368] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.953903] env[62627]: DEBUG oslo_vmware.api [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195049, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.081564] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195047, 'name': CreateVM_Task, 'duration_secs': 0.808978} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.081752] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 28e42d69-0c19-4e3c-ae49-9076fdc65aa8] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1145.082724] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1145.082892] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1145.083229] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1145.083491] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-040eb5ba-2cda-49ee-94dc-300483d30076 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.085915] env[62627]: DEBUG nova.network.neutron [req-fd0fa8aa-9aad-4506-9c4a-060b771e60d3 req-8ed2f18c-3e5a-4152-9482-8d4c17052df0 service nova] [instance: 28e42d69-0c19-4e3c-ae49-9076fdc65aa8] Updated VIF entry in instance network info cache for port 37591ab2-0826-44b0-99ae-03059115c799. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1145.086264] env[62627]: DEBUG nova.network.neutron [req-fd0fa8aa-9aad-4506-9c4a-060b771e60d3 req-8ed2f18c-3e5a-4152-9482-8d4c17052df0 service nova] [instance: 28e42d69-0c19-4e3c-ae49-9076fdc65aa8] Updating instance_info_cache with network_info: [{"id": "37591ab2-0826-44b0-99ae-03059115c799", "address": "fa:16:3e:b8:7c:66", "network": {"id": "280614ef-fbde-41d9-927f-7471cc50aa14", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-970093695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97d5055e1a674886a161a96b0e095451", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8233b50c-be21-441a-a357-a29138a95b8b", "external-id": "nsx-vlan-transportzone-496", "segmentation_id": 496, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap37591ab2-08", "ovs_interfaceid": "37591ab2-0826-44b0-99ae-03059115c799", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1145.095021] env[62627]: DEBUG oslo_vmware.api [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Waiting for the task: (returnval){ [ 1145.095021] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]521590eb-d0c2-ea54-2fbb-6d5e3133b9ca" [ 1145.095021] env[62627]: _type = "Task" [ 1145.095021] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.105051] env[62627]: DEBUG oslo_vmware.api [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]521590eb-d0c2-ea54-2fbb-6d5e3133b9ca, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.284602] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54cc1898-96f7-4b17-a002-dadfd2b4135a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.293275] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31344214-ff2e-4dac-b7a0-9f9f0618accc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.323775] env[62627]: DEBUG nova.network.neutron [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Updating instance_info_cache with network_info: [{"id": "9297477b-3156-43b7-b7ca-4c2d76609f24", "address": "fa:16:3e:93:95:7a", "network": {"id": "7c6d5a35-da58-4a44-83d0-20feb994e5ab", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-709440062-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c9263b20f714274a55b88a8bdbd5251", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "99be9a5e-b3f9-4e6c-83d5-df11f817847d", "external-id": "nsx-vlan-transportzone-566", "segmentation_id": 566, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9297477b-31", "ovs_interfaceid": "9297477b-3156-43b7-b7ca-4c2d76609f24", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1145.325559] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea4f4234-1b36-425e-898c-b70a166b7ca3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.333896] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89db59f3-50ed-4dba-9e91-b1e9a8defae8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.350710] env[62627]: DEBUG nova.compute.provider_tree [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1145.363643] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ba47d5df-8aec-4758-93a2-2e27eea4eb86 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1145.383244] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fcd4685-1a84-4666-b933-0136a997121a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquiring lock "refresh_cache-e9d33611-e373-4303-937d-b43d893e51f5" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1145.383573] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fcd4685-1a84-4666-b933-0136a997121a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquired lock "refresh_cache-e9d33611-e373-4303-937d-b43d893e51f5" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1145.383975] env[62627]: DEBUG nova.network.neutron [None req-1fcd4685-1a84-4666-b933-0136a997121a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1145.440427] env[62627]: DEBUG oslo_vmware.api [None req-d0b2807c-3680-4f24-9c65-330d4dce8de2 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195046, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.451500] env[62627]: DEBUG oslo_vmware.api [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195049, 'name': ReconfigVM_Task, 'duration_secs': 0.306814} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.451767] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Reconfigured VM instance instance-0000005b to attach disk [datastore2] 634d547b-3b5e-4185-8229-daf055ca6e69/634d547b-3b5e-4185-8229-daf055ca6e69.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1145.452401] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-11737deb-0cac-4a82-9f97-9fefdd2d67e6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.459962] env[62627]: DEBUG oslo_vmware.api [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1145.459962] env[62627]: value = "task-2195050" [ 1145.459962] env[62627]: _type = "Task" [ 1145.459962] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.468378] env[62627]: DEBUG oslo_vmware.api [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195050, 'name': Rename_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.591818] env[62627]: DEBUG oslo_concurrency.lockutils [req-fd0fa8aa-9aad-4506-9c4a-060b771e60d3 req-8ed2f18c-3e5a-4152-9482-8d4c17052df0 service nova] Releasing lock "refresh_cache-28e42d69-0c19-4e3c-ae49-9076fdc65aa8" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1145.592294] env[62627]: DEBUG nova.compute.manager [req-fd0fa8aa-9aad-4506-9c4a-060b771e60d3 req-8ed2f18c-3e5a-4152-9482-8d4c17052df0 service nova] [instance: f6357f01-d7cb-4477-be2d-98fc17d7a940] Received event network-vif-deleted-43ceb60f-795b-49ef-b06d-892b278ac067 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1145.592436] env[62627]: INFO nova.compute.manager [req-fd0fa8aa-9aad-4506-9c4a-060b771e60d3 req-8ed2f18c-3e5a-4152-9482-8d4c17052df0 service nova] [instance: f6357f01-d7cb-4477-be2d-98fc17d7a940] Neutron deleted interface 43ceb60f-795b-49ef-b06d-892b278ac067; detaching it from the instance and deleting it from the info cache [ 1145.592740] env[62627]: DEBUG nova.network.neutron [req-fd0fa8aa-9aad-4506-9c4a-060b771e60d3 req-8ed2f18c-3e5a-4152-9482-8d4c17052df0 service nova] [instance: f6357f01-d7cb-4477-be2d-98fc17d7a940] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1145.606201] env[62627]: DEBUG oslo_vmware.api [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]521590eb-d0c2-ea54-2fbb-6d5e3133b9ca, 'name': SearchDatastore_Task, 'duration_secs': 0.013469} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.606486] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1145.606740] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: 28e42d69-0c19-4e3c-ae49-9076fdc65aa8] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1145.607031] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1145.607190] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1145.607367] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1145.607799] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dbdbd5da-4a63-440a-b569-dcae33efc4b0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.618038] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1145.618232] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1145.618999] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1b995590-7a24-4b28-b5e6-6721d5f4db22 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.624668] env[62627]: DEBUG oslo_vmware.api [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Waiting for the task: (returnval){ [ 1145.624668] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52e9f8bb-521c-889e-b2d1-9f11ae2dfd89" [ 1145.624668] env[62627]: _type = "Task" [ 1145.624668] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.632551] env[62627]: DEBUG oslo_vmware.api [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52e9f8bb-521c-889e-b2d1-9f11ae2dfd89, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.829608] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Releasing lock "refresh_cache-abb14e3e-b521-4f21-bc94-6bbb66fbee57" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1145.853463] env[62627]: DEBUG nova.scheduler.client.report [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1145.942107] env[62627]: DEBUG oslo_vmware.api [None req-d0b2807c-3680-4f24-9c65-330d4dce8de2 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195046, 'name': CreateSnapshot_Task, 'duration_secs': 1.592496} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.942283] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d0b2807c-3680-4f24-9c65-330d4dce8de2 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Created Snapshot of the VM instance {{(pid=62627) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1145.943055] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ef4cd50-4d4b-4ff8-a988-685b78a9220f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.971563] env[62627]: DEBUG oslo_vmware.api [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195050, 'name': Rename_Task, 'duration_secs': 0.173059} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.971837] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1145.972103] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4bacf880-0051-4ce3-8ed2-0180b7a50f69 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.983736] env[62627]: DEBUG oslo_vmware.api [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1145.983736] env[62627]: value = "task-2195052" [ 1145.983736] env[62627]: _type = "Task" [ 1145.983736] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.992455] env[62627]: DEBUG oslo_vmware.api [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195052, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.096092] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6b666d92-2cfc-4b5c-9c3d-244948ee3229 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.106998] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ce47b14-6daa-42f1-8d56-ab4a11630eb1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.138037] env[62627]: DEBUG oslo_vmware.api [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52e9f8bb-521c-889e-b2d1-9f11ae2dfd89, 'name': SearchDatastore_Task, 'duration_secs': 0.009332} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1146.138990] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f33e3d6-1413-4cfd-8f17-cdf1d9b6a5af {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.152840] env[62627]: DEBUG nova.compute.manager [req-fd0fa8aa-9aad-4506-9c4a-060b771e60d3 req-8ed2f18c-3e5a-4152-9482-8d4c17052df0 service nova] [instance: f6357f01-d7cb-4477-be2d-98fc17d7a940] Detach interface failed, port_id=43ceb60f-795b-49ef-b06d-892b278ac067, reason: Instance f6357f01-d7cb-4477-be2d-98fc17d7a940 could not be found. {{(pid=62627) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1146.160521] env[62627]: DEBUG oslo_vmware.api [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Waiting for the task: (returnval){ [ 1146.160521] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]523b5473-21b5-5f93-0bf7-d3ffff8d9e42" [ 1146.160521] env[62627]: _type = "Task" [ 1146.160521] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.171667] env[62627]: DEBUG oslo_vmware.api [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]523b5473-21b5-5f93-0bf7-d3ffff8d9e42, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.172697] env[62627]: DEBUG nova.network.neutron [None req-b1c70210-0f0c-4d59-9eaf-fb6af369ecab tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Successfully updated port: aa0a7b01-9a61-4e32-82cb-8f849bff6266 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1146.204620] env[62627]: DEBUG nova.network.neutron [None req-1fcd4685-1a84-4666-b933-0136a997121a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Updating instance_info_cache with network_info: [{"id": "5825a537-f659-4e7e-907e-e22208e06c0c", "address": "fa:16:3e:a9:0b:9d", "network": {"id": "da26ed97-a6a7-47f1-b6c0-d1b2f45ad202", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-265963183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.167", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97c11d791dcf4f94a8e8bda7a6f014cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6eaa481-1f92-4851-b98e-09ed0daad7cb", "external-id": "nsx-vlan-transportzone-636", "segmentation_id": 636, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5825a537-f6", "ovs_interfaceid": 
"5825a537-f659-4e7e-907e-e22208e06c0c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1146.317684] env[62627]: DEBUG nova.compute.manager [req-09fc8b01-7855-43f4-b5e8-e0529fe6ff7e req-aa9b2bd8-7d5b-4309-a415-23cbfd14f408 service nova] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Received event network-vif-plugged-aa0a7b01-9a61-4e32-82cb-8f849bff6266 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1146.317890] env[62627]: DEBUG oslo_concurrency.lockutils [req-09fc8b01-7855-43f4-b5e8-e0529fe6ff7e req-aa9b2bd8-7d5b-4309-a415-23cbfd14f408 service nova] Acquiring lock "84b8381c-bdd4-462e-aa98-0cc972f00882-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1146.318121] env[62627]: DEBUG oslo_concurrency.lockutils [req-09fc8b01-7855-43f4-b5e8-e0529fe6ff7e req-aa9b2bd8-7d5b-4309-a415-23cbfd14f408 service nova] Lock "84b8381c-bdd4-462e-aa98-0cc972f00882-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1146.318359] env[62627]: DEBUG oslo_concurrency.lockutils [req-09fc8b01-7855-43f4-b5e8-e0529fe6ff7e req-aa9b2bd8-7d5b-4309-a415-23cbfd14f408 service nova] Lock "84b8381c-bdd4-462e-aa98-0cc972f00882-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1146.318601] env[62627]: DEBUG nova.compute.manager [req-09fc8b01-7855-43f4-b5e8-e0529fe6ff7e req-aa9b2bd8-7d5b-4309-a415-23cbfd14f408 service nova] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] No waiting events found dispatching network-vif-plugged-aa0a7b01-9a61-4e32-82cb-8f849bff6266 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1146.318823] env[62627]: WARNING nova.compute.manager [req-09fc8b01-7855-43f4-b5e8-e0529fe6ff7e req-aa9b2bd8-7d5b-4309-a415-23cbfd14f408 service nova] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Received unexpected event network-vif-plugged-aa0a7b01-9a61-4e32-82cb-8f849bff6266 for instance with vm_state active and task_state None. [ 1146.319058] env[62627]: DEBUG nova.compute.manager [req-09fc8b01-7855-43f4-b5e8-e0529fe6ff7e req-aa9b2bd8-7d5b-4309-a415-23cbfd14f408 service nova] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Received event network-changed-aa0a7b01-9a61-4e32-82cb-8f849bff6266 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1146.319270] env[62627]: DEBUG nova.compute.manager [req-09fc8b01-7855-43f4-b5e8-e0529fe6ff7e req-aa9b2bd8-7d5b-4309-a415-23cbfd14f408 service nova] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Refreshing instance network info cache due to event network-changed-aa0a7b01-9a61-4e32-82cb-8f849bff6266. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1146.319502] env[62627]: DEBUG oslo_concurrency.lockutils [req-09fc8b01-7855-43f4-b5e8-e0529fe6ff7e req-aa9b2bd8-7d5b-4309-a415-23cbfd14f408 service nova] Acquiring lock "refresh_cache-84b8381c-bdd4-462e-aa98-0cc972f00882" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1146.319649] env[62627]: DEBUG oslo_concurrency.lockutils [req-09fc8b01-7855-43f4-b5e8-e0529fe6ff7e req-aa9b2bd8-7d5b-4309-a415-23cbfd14f408 service nova] Acquired lock "refresh_cache-84b8381c-bdd4-462e-aa98-0cc972f00882" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1146.319838] env[62627]: DEBUG nova.network.neutron [req-09fc8b01-7855-43f4-b5e8-e0529fe6ff7e req-aa9b2bd8-7d5b-4309-a415-23cbfd14f408 service nova] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Refreshing network info cache for port aa0a7b01-9a61-4e32-82cb-8f849bff6266 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1146.357608] env[62627]: DEBUG oslo_concurrency.lockutils [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.312s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1146.358106] env[62627]: DEBUG nova.compute.manager [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: 367a83b9-79f8-43b2-91dd-40d13193fa40] Start building networks asynchronously for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1146.360534] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ba47d5df-8aec-4758-93a2-2e27eea4eb86 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.997s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1146.360745] env[62627]: DEBUG nova.objects.instance [None req-ba47d5df-8aec-4758-93a2-2e27eea4eb86 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Lazy-loading 'resources' on Instance uuid f6357f01-d7cb-4477-be2d-98fc17d7a940 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1146.463645] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d0b2807c-3680-4f24-9c65-330d4dce8de2 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Creating linked-clone VM from snapshot {{(pid=62627) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1146.463944] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-fcbdbbd0-0bf7-4a75-9862-dc5926522899 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.474264] env[62627]: DEBUG oslo_vmware.api [None req-d0b2807c-3680-4f24-9c65-330d4dce8de2 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1146.474264] env[62627]: value = "task-2195053" [ 1146.474264] env[62627]: _type = "Task" [ 1146.474264] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.483474] env[62627]: DEBUG oslo_vmware.api [None req-d0b2807c-3680-4f24-9c65-330d4dce8de2 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195053, 'name': CloneVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.492093] env[62627]: DEBUG oslo_vmware.api [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195052, 'name': PowerOnVM_Task} progress is 80%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.671694] env[62627]: DEBUG oslo_vmware.api [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]523b5473-21b5-5f93-0bf7-d3ffff8d9e42, 'name': SearchDatastore_Task, 'duration_secs': 0.021252} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1146.672128] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1146.672256] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 28e42d69-0c19-4e3c-ae49-9076fdc65aa8/28e42d69-0c19-4e3c-ae49-9076fdc65aa8.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1146.672532] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d9b7656a-d8d6-44d2-b23f-0c778ec811b9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.675146] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b1c70210-0f0c-4d59-9eaf-fb6af369ecab tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquiring lock "refresh_cache-84b8381c-bdd4-462e-aa98-0cc972f00882" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1146.684129] env[62627]: DEBUG oslo_vmware.api [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Waiting for the task: (returnval){ [ 1146.684129] env[62627]: value = "task-2195054" [ 1146.684129] env[62627]: _type = "Task" [ 1146.684129] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.697174] env[62627]: DEBUG oslo_vmware.api [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195054, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.707378] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fcd4685-1a84-4666-b933-0136a997121a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Releasing lock "refresh_cache-e9d33611-e373-4303-937d-b43d893e51f5" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1146.864765] env[62627]: DEBUG nova.compute.utils [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1146.869071] env[62627]: DEBUG nova.compute.manager [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: 367a83b9-79f8-43b2-91dd-40d13193fa40] Allocating IP information in the background. {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1146.869246] env[62627]: DEBUG nova.network.neutron [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: 367a83b9-79f8-43b2-91dd-40d13193fa40] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1146.910475] env[62627]: DEBUG nova.policy [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2232b2002014411a9183e2139700ea98', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '711f8ab1a2d5466399f408870e412587', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 1146.993616] env[62627]: DEBUG oslo_vmware.api [None req-d0b2807c-3680-4f24-9c65-330d4dce8de2 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195053, 'name': CloneVM_Task} progress is 94%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.000272] env[62627]: DEBUG oslo_vmware.api [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195052, 'name': PowerOnVM_Task, 'duration_secs': 0.73305} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1147.003772] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1147.004025] env[62627]: INFO nova.compute.manager [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Took 9.01 seconds to spawn the instance on the hypervisor. [ 1147.004501] env[62627]: DEBUG nova.compute.manager [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1147.005635] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8655f4ca-50e2-462a-85a2-205c00a4f8e5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.084400] env[62627]: DEBUG nova.network.neutron [req-09fc8b01-7855-43f4-b5e8-e0529fe6ff7e req-aa9b2bd8-7d5b-4309-a415-23cbfd14f408 service nova] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Added VIF to instance network info cache for port aa0a7b01-9a61-4e32-82cb-8f849bff6266. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3546}} [ 1147.084977] env[62627]: DEBUG nova.network.neutron [req-09fc8b01-7855-43f4-b5e8-e0529fe6ff7e req-aa9b2bd8-7d5b-4309-a415-23cbfd14f408 service nova] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Updating instance_info_cache with network_info: [{"id": "4c3f3405-39a4-4c22-9c26-bdd0ab342c29", "address": "fa:16:3e:05:76:ef", "network": {"id": "a1c5c227-8f1f-4881-a6d8-f446b64d9846", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1052922661-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.243", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e4f7ad3ebcf4c4d97bdbce58eec5a09", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "023d6500-887e-4dc4-bec5-06b40450d9c0", "external-id": "nsx-vlan-transportzone-108", "segmentation_id": 108, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c3f3405-39", "ovs_interfaceid": "4c3f3405-39a4-4c22-9c26-bdd0ab342c29", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "b8c79070-6650-4427-ab59-c910dccfba54", "address": "fa:16:3e:08:8f:ca", "network": {"id": "a1c5c227-8f1f-4881-a6d8-f446b64d9846", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1052922661-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], 
"gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e4f7ad3ebcf4c4d97bdbce58eec5a09", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "023d6500-887e-4dc4-bec5-06b40450d9c0", "external-id": "nsx-vlan-transportzone-108", "segmentation_id": 108, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8c79070-66", "ovs_interfaceid": "b8c79070-6650-4427-ab59-c910dccfba54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "aa0a7b01-9a61-4e32-82cb-8f849bff6266", "address": "fa:16:3e:fe:a1:e5", "network": {"id": "a1c5c227-8f1f-4881-a6d8-f446b64d9846", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1052922661-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e4f7ad3ebcf4c4d97bdbce58eec5a09", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "023d6500-887e-4dc4-bec5-06b40450d9c0", "external-id": "nsx-vlan-transportzone-108", "segmentation_id": 108, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa0a7b01-9a", "ovs_interfaceid": "aa0a7b01-9a61-4e32-82cb-8f849bff6266", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1147.120865] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d412bdf1-88df-4bf6-a5d6-d588373a0f26 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.132867] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bf01084-3dcc-4d7a-9e30-b5b067036a0f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.166569] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54dca10b-be22-4025-a6cd-b8986872ae1e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.177608] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feec3f8f-358e-410f-9c3b-05b0731a4b95 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.199889] env[62627]: DEBUG nova.compute.provider_tree [None req-ba47d5df-8aec-4758-93a2-2e27eea4eb86 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Inventory has not changed in ProviderTree for 
provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1147.206794] env[62627]: DEBUG oslo_vmware.api [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195054, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.213454] env[62627]: DEBUG nova.compute.manager [None req-1fcd4685-1a84-4666-b933-0136a997121a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1147.215096] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63dc758e-e9e3-4d72-9943-b12b5954ee1d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.337388] env[62627]: DEBUG nova.network.neutron [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: 367a83b9-79f8-43b2-91dd-40d13193fa40] Successfully created port: 4534e6a9-471a-4703-aa7a-7aac3d20292f {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1147.345996] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53de2610-9c67-4cf5-9002-9bf22efac810 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.364815] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Updating instance 'abb14e3e-b521-4f21-bc94-6bbb66fbee57' progress to 0 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1147.370258] env[62627]: DEBUG nova.compute.utils [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1147.485622] env[62627]: DEBUG oslo_vmware.api [None req-d0b2807c-3680-4f24-9c65-330d4dce8de2 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195053, 'name': CloneVM_Task} progress is 94%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.533790] env[62627]: INFO nova.compute.manager [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Took 17.18 seconds to build instance. 
[ 1147.594566] env[62627]: DEBUG oslo_concurrency.lockutils [req-09fc8b01-7855-43f4-b5e8-e0529fe6ff7e req-aa9b2bd8-7d5b-4309-a415-23cbfd14f408 service nova] Releasing lock "refresh_cache-84b8381c-bdd4-462e-aa98-0cc972f00882" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1147.594566] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b1c70210-0f0c-4d59-9eaf-fb6af369ecab tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquired lock "refresh_cache-84b8381c-bdd4-462e-aa98-0cc972f00882" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1147.594566] env[62627]: DEBUG nova.network.neutron [None req-b1c70210-0f0c-4d59-9eaf-fb6af369ecab tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1147.696266] env[62627]: DEBUG oslo_vmware.api [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195054, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.660132} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1147.697229] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 28e42d69-0c19-4e3c-ae49-9076fdc65aa8/28e42d69-0c19-4e3c-ae49-9076fdc65aa8.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1147.697674] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: 28e42d69-0c19-4e3c-ae49-9076fdc65aa8] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1147.698066] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a321b206-2a95-4ece-a57f-58013361be76 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.705146] env[62627]: DEBUG nova.scheduler.client.report [None req-ba47d5df-8aec-4758-93a2-2e27eea4eb86 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1147.711334] env[62627]: DEBUG oslo_vmware.api [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 
tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Waiting for the task: (returnval){ [ 1147.711334] env[62627]: value = "task-2195055" [ 1147.711334] env[62627]: _type = "Task" [ 1147.711334] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.723818] env[62627]: DEBUG oslo_vmware.api [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195055, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.871249] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1147.871578] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7d7a88e6-8435-4929-b704-ad88617f6a4a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.873699] env[62627]: DEBUG nova.compute.manager [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: 367a83b9-79f8-43b2-91dd-40d13193fa40] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1147.884442] env[62627]: DEBUG oslo_vmware.api [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 1147.884442] env[62627]: value = "task-2195057" [ 1147.884442] env[62627]: _type = "Task" [ 1147.884442] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.897774] env[62627]: DEBUG oslo_vmware.api [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2195057, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.986974] env[62627]: DEBUG oslo_vmware.api [None req-d0b2807c-3680-4f24-9c65-330d4dce8de2 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195053, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.036973] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3edc68b0-aa09-4fb3-972a-ac9b7ead33ed tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "634d547b-3b5e-4185-8229-daf055ca6e69" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.688s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1148.138349] env[62627]: WARNING nova.network.neutron [None req-b1c70210-0f0c-4d59-9eaf-fb6af369ecab tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] a1c5c227-8f1f-4881-a6d8-f446b64d9846 already exists in list: networks containing: ['a1c5c227-8f1f-4881-a6d8-f446b64d9846']. ignoring it [ 1148.138693] env[62627]: WARNING nova.network.neutron [None req-b1c70210-0f0c-4d59-9eaf-fb6af369ecab tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] a1c5c227-8f1f-4881-a6d8-f446b64d9846 already exists in list: networks containing: ['a1c5c227-8f1f-4881-a6d8-f446b64d9846']. ignoring it [ 1148.138892] env[62627]: WARNING nova.network.neutron [None req-b1c70210-0f0c-4d59-9eaf-fb6af369ecab tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] a1c5c227-8f1f-4881-a6d8-f446b64d9846 already exists in list: networks containing: ['a1c5c227-8f1f-4881-a6d8-f446b64d9846']. ignoring it [ 1148.139181] env[62627]: WARNING nova.network.neutron [None req-b1c70210-0f0c-4d59-9eaf-fb6af369ecab tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] aa0a7b01-9a61-4e32-82cb-8f849bff6266 already exists in list: port_ids containing: ['aa0a7b01-9a61-4e32-82cb-8f849bff6266']. ignoring it [ 1148.223592] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ba47d5df-8aec-4758-93a2-2e27eea4eb86 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.863s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1148.226161] env[62627]: DEBUG oslo_vmware.api [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195055, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.182476} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.226665] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: 28e42d69-0c19-4e3c-ae49-9076fdc65aa8] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1148.227657] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d788281-77d5-4108-9676-fbf978f5288f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.245886] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9869045-e1de-404f-9626-9f58de997e9d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.259670] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: 28e42d69-0c19-4e3c-ae49-9076fdc65aa8] Reconfiguring VM instance instance-0000005c to attach disk [datastore2] 28e42d69-0c19-4e3c-ae49-9076fdc65aa8/28e42d69-0c19-4e3c-ae49-9076fdc65aa8.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1148.263606] env[62627]: INFO nova.scheduler.client.report [None req-ba47d5df-8aec-4758-93a2-2e27eea4eb86 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Deleted allocations for instance f6357f01-d7cb-4477-be2d-98fc17d7a940 [ 1148.267947] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1b94e70a-bf77-45d7-b652-c95a18fe7c19 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.289285] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-1fcd4685-1a84-4666-b933-0136a997121a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Doing hard reboot of VM {{(pid=62627) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1148.289651] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-acf8cdcc-7424-47db-9768-bf833a74d858 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.292949] env[62627]: DEBUG oslo_vmware.api [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Waiting for the task: (returnval){ [ 1148.292949] env[62627]: value = "task-2195058" [ 1148.292949] env[62627]: _type = "Task" [ 1148.292949] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.303843] env[62627]: DEBUG oslo_vmware.api [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195058, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.305075] env[62627]: DEBUG oslo_vmware.api [None req-1fcd4685-1a84-4666-b933-0136a997121a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 1148.305075] env[62627]: value = "task-2195059" [ 1148.305075] env[62627]: _type = "Task" [ 1148.305075] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.315082] env[62627]: DEBUG oslo_vmware.api [None req-1fcd4685-1a84-4666-b933-0136a997121a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195059, 'name': ResetVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.401998] env[62627]: DEBUG oslo_vmware.api [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2195057, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.488274] env[62627]: DEBUG oslo_vmware.api [None req-d0b2807c-3680-4f24-9c65-330d4dce8de2 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195053, 'name': CloneVM_Task, 'duration_secs': 1.95138} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.490869] env[62627]: INFO nova.virt.vmwareapi.vmops [None req-d0b2807c-3680-4f24-9c65-330d4dce8de2 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Created linked-clone VM from snapshot [ 1148.491719] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c8b610c-48a6-4a87-b561-ee0513f32e72 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.501028] env[62627]: DEBUG nova.virt.vmwareapi.images [None req-d0b2807c-3680-4f24-9c65-330d4dce8de2 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Uploading image 6b01e108-c175-4bfa-86d7-7807135ee79e {{(pid=62627) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1148.523628] env[62627]: DEBUG oslo_vmware.rw_handles [None req-d0b2807c-3680-4f24-9c65-330d4dce8de2 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1148.523628] env[62627]: value = "vm-447791" [ 1148.523628] env[62627]: _type = "VirtualMachine" [ 1148.523628] env[62627]: }. 
{{(pid=62627) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1148.523951] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-f59ee7ca-bf39-43be-a3ad-0f17afe5650e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.535029] env[62627]: DEBUG oslo_vmware.rw_handles [None req-d0b2807c-3680-4f24-9c65-330d4dce8de2 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lease: (returnval){ [ 1148.535029] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5277f151-ce24-dbb6-797d-a426063d87a4" [ 1148.535029] env[62627]: _type = "HttpNfcLease" [ 1148.535029] env[62627]: } obtained for exporting VM: (result){ [ 1148.535029] env[62627]: value = "vm-447791" [ 1148.535029] env[62627]: _type = "VirtualMachine" [ 1148.535029] env[62627]: }. {{(pid=62627) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1148.535410] env[62627]: DEBUG oslo_vmware.api [None req-d0b2807c-3680-4f24-9c65-330d4dce8de2 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the lease: (returnval){ [ 1148.535410] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5277f151-ce24-dbb6-797d-a426063d87a4" [ 1148.535410] env[62627]: _type = "HttpNfcLease" [ 1148.535410] env[62627]: } to be ready. {{(pid=62627) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1148.541957] env[62627]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1148.541957] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5277f151-ce24-dbb6-797d-a426063d87a4" [ 1148.541957] env[62627]: _type = "HttpNfcLease" [ 1148.541957] env[62627]: } is initializing. 
{{(pid=62627) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1148.675934] env[62627]: DEBUG nova.network.neutron [None req-b1c70210-0f0c-4d59-9eaf-fb6af369ecab tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Updating instance_info_cache with network_info: [{"id": "4c3f3405-39a4-4c22-9c26-bdd0ab342c29", "address": "fa:16:3e:05:76:ef", "network": {"id": "a1c5c227-8f1f-4881-a6d8-f446b64d9846", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1052922661-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.243", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e4f7ad3ebcf4c4d97bdbce58eec5a09", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "023d6500-887e-4dc4-bec5-06b40450d9c0", "external-id": "nsx-vlan-transportzone-108", "segmentation_id": 108, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c3f3405-39", "ovs_interfaceid": "4c3f3405-39a4-4c22-9c26-bdd0ab342c29", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "b8c79070-6650-4427-ab59-c910dccfba54", "address": "fa:16:3e:08:8f:ca", "network": {"id": "a1c5c227-8f1f-4881-a6d8-f446b64d9846", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1052922661-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e4f7ad3ebcf4c4d97bdbce58eec5a09", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "023d6500-887e-4dc4-bec5-06b40450d9c0", "external-id": "nsx-vlan-transportzone-108", "segmentation_id": 108, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8c79070-66", "ovs_interfaceid": "b8c79070-6650-4427-ab59-c910dccfba54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "aa0a7b01-9a61-4e32-82cb-8f849bff6266", "address": "fa:16:3e:fe:a1:e5", "network": {"id": "a1c5c227-8f1f-4881-a6d8-f446b64d9846", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1052922661-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e4f7ad3ebcf4c4d97bdbce58eec5a09", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", 
"details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "023d6500-887e-4dc4-bec5-06b40450d9c0", "external-id": "nsx-vlan-transportzone-108", "segmentation_id": 108, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa0a7b01-9a", "ovs_interfaceid": "aa0a7b01-9a61-4e32-82cb-8f849bff6266", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1148.787198] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ba47d5df-8aec-4758-93a2-2e27eea4eb86 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Lock "f6357f01-d7cb-4477-be2d-98fc17d7a940" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.281s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1148.809094] env[62627]: DEBUG oslo_vmware.api [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195058, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.819143] env[62627]: DEBUG oslo_vmware.api [None req-1fcd4685-1a84-4666-b933-0136a997121a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195059, 'name': ResetVM_Task, 'duration_secs': 0.107764} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.819441] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-1fcd4685-1a84-4666-b933-0136a997121a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Did hard reboot of VM {{(pid=62627) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1148.819742] env[62627]: DEBUG nova.compute.manager [None req-1fcd4685-1a84-4666-b933-0136a997121a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1148.820605] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35546eb9-f07e-4bb6-bd08-2ce9b29d0fa4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.886033] env[62627]: DEBUG nova.compute.manager [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: 367a83b9-79f8-43b2-91dd-40d13193fa40] Start spawning the instance on the hypervisor. {{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1148.900993] env[62627]: DEBUG oslo_vmware.api [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2195057, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.916599] env[62627]: DEBUG nova.virt.hardware [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:34:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=1,extra_specs={hw_rng:allowed='True'},flavorid='257731561',id=39,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_1-1140026480',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1148.916994] env[62627]: DEBUG nova.virt.hardware [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1148.917145] env[62627]: DEBUG nova.virt.hardware [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1148.917360] env[62627]: DEBUG nova.virt.hardware [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1148.917508] env[62627]: DEBUG nova.virt.hardware [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1148.917681] env[62627]: DEBUG nova.virt.hardware [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1148.917929] env[62627]: DEBUG nova.virt.hardware [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1148.918156] env[62627]: DEBUG nova.virt.hardware [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 
tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1148.918337] env[62627]: DEBUG nova.virt.hardware [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1148.918563] env[62627]: DEBUG nova.virt.hardware [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1148.918698] env[62627]: DEBUG nova.virt.hardware [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1148.919630] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a3e748f-dd23-4c11-9653-c9995fbb7cfa {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.930200] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a667bf35-f131-4ab8-9271-3b5ce252b2d8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.044326] env[62627]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1149.044326] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5277f151-ce24-dbb6-797d-a426063d87a4" [ 1149.044326] env[62627]: _type = "HttpNfcLease" [ 1149.044326] env[62627]: } is ready. {{(pid=62627) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1149.044691] env[62627]: DEBUG oslo_vmware.rw_handles [None req-d0b2807c-3680-4f24-9c65-330d4dce8de2 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1149.044691] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5277f151-ce24-dbb6-797d-a426063d87a4" [ 1149.044691] env[62627]: _type = "HttpNfcLease" [ 1149.044691] env[62627]: }. {{(pid=62627) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1149.045814] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b273dec6-ab48-4ae0-b92e-1a2a74c172ea {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.056229] env[62627]: DEBUG oslo_vmware.rw_handles [None req-d0b2807c-3680-4f24-9c65-330d4dce8de2 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b0041a-95e7-b9be-3f7c-aa63c5753dfa/disk-0.vmdk from lease info. 
{{(pid=62627) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1149.056350] env[62627]: DEBUG oslo_vmware.rw_handles [None req-d0b2807c-3680-4f24-9c65-330d4dce8de2 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b0041a-95e7-b9be-3f7c-aa63c5753dfa/disk-0.vmdk for reading. {{(pid=62627) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1149.129671] env[62627]: DEBUG nova.network.neutron [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: 367a83b9-79f8-43b2-91dd-40d13193fa40] Successfully updated port: 4534e6a9-471a-4703-aa7a-7aac3d20292f {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1149.141304] env[62627]: DEBUG nova.compute.manager [req-65aafc6f-5bf3-4bc1-9d9e-629f71d8d829 req-cb364edd-f4c2-4087-adfe-76f7d643a696 service nova] [instance: 367a83b9-79f8-43b2-91dd-40d13193fa40] Received event network-vif-plugged-4534e6a9-471a-4703-aa7a-7aac3d20292f {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1149.141470] env[62627]: DEBUG oslo_concurrency.lockutils [req-65aafc6f-5bf3-4bc1-9d9e-629f71d8d829 req-cb364edd-f4c2-4087-adfe-76f7d643a696 service nova] Acquiring lock "367a83b9-79f8-43b2-91dd-40d13193fa40-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1149.141695] env[62627]: DEBUG oslo_concurrency.lockutils [req-65aafc6f-5bf3-4bc1-9d9e-629f71d8d829 req-cb364edd-f4c2-4087-adfe-76f7d643a696 service nova] Lock "367a83b9-79f8-43b2-91dd-40d13193fa40-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1149.141989] env[62627]: DEBUG oslo_concurrency.lockutils [req-65aafc6f-5bf3-4bc1-9d9e-629f71d8d829 req-cb364edd-f4c2-4087-adfe-76f7d643a696 service nova] Lock "367a83b9-79f8-43b2-91dd-40d13193fa40-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1149.142082] env[62627]: DEBUG nova.compute.manager [req-65aafc6f-5bf3-4bc1-9d9e-629f71d8d829 req-cb364edd-f4c2-4087-adfe-76f7d643a696 service nova] [instance: 367a83b9-79f8-43b2-91dd-40d13193fa40] No waiting events found dispatching network-vif-plugged-4534e6a9-471a-4703-aa7a-7aac3d20292f {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1149.142238] env[62627]: WARNING nova.compute.manager [req-65aafc6f-5bf3-4bc1-9d9e-629f71d8d829 req-cb364edd-f4c2-4087-adfe-76f7d643a696 service nova] [instance: 367a83b9-79f8-43b2-91dd-40d13193fa40] Received unexpected event network-vif-plugged-4534e6a9-471a-4703-aa7a-7aac3d20292f for instance with vm_state building and task_state spawning. 
[ 1149.179478] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b1c70210-0f0c-4d59-9eaf-fb6af369ecab tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Releasing lock "refresh_cache-84b8381c-bdd4-462e-aa98-0cc972f00882" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1149.180171] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b1c70210-0f0c-4d59-9eaf-fb6af369ecab tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquiring lock "84b8381c-bdd4-462e-aa98-0cc972f00882" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1149.180378] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b1c70210-0f0c-4d59-9eaf-fb6af369ecab tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquired lock "84b8381c-bdd4-462e-aa98-0cc972f00882" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1149.181491] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a011c4c-073c-48d1-a67f-66bf9af49995 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.200573] env[62627]: DEBUG nova.virt.hardware [None req-b1c70210-0f0c-4d59-9eaf-fb6af369ecab tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1149.200824] env[62627]: DEBUG nova.virt.hardware [None req-b1c70210-0f0c-4d59-9eaf-fb6af369ecab tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1149.200983] env[62627]: DEBUG nova.virt.hardware [None req-b1c70210-0f0c-4d59-9eaf-fb6af369ecab tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1149.201187] env[62627]: DEBUG nova.virt.hardware [None req-b1c70210-0f0c-4d59-9eaf-fb6af369ecab tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1149.201343] env[62627]: DEBUG nova.virt.hardware [None req-b1c70210-0f0c-4d59-9eaf-fb6af369ecab tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1149.201486] env[62627]: DEBUG nova.virt.hardware [None 
req-b1c70210-0f0c-4d59-9eaf-fb6af369ecab tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1149.201687] env[62627]: DEBUG nova.virt.hardware [None req-b1c70210-0f0c-4d59-9eaf-fb6af369ecab tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1149.201846] env[62627]: DEBUG nova.virt.hardware [None req-b1c70210-0f0c-4d59-9eaf-fb6af369ecab tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1149.202024] env[62627]: DEBUG nova.virt.hardware [None req-b1c70210-0f0c-4d59-9eaf-fb6af369ecab tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1149.202184] env[62627]: DEBUG nova.virt.hardware [None req-b1c70210-0f0c-4d59-9eaf-fb6af369ecab tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1149.202355] env[62627]: DEBUG nova.virt.hardware [None req-b1c70210-0f0c-4d59-9eaf-fb6af369ecab tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1149.208572] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-b1c70210-0f0c-4d59-9eaf-fb6af369ecab tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Reconfiguring VM to attach interface {{(pid=62627) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1149.209285] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9ff1b001-45a4-4b32-8d09-28b0b9403c00 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.225489] env[62627]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-0229fc7a-48db-474d-b583-817a85dc61c7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.232928] env[62627]: DEBUG oslo_vmware.api [None req-b1c70210-0f0c-4d59-9eaf-fb6af369ecab tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Waiting for the task: (returnval){ [ 1149.232928] env[62627]: value = "task-2195061" [ 1149.232928] env[62627]: _type = "Task" [ 1149.232928] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.243205] env[62627]: DEBUG oslo_vmware.api [None req-b1c70210-0f0c-4d59-9eaf-fb6af369ecab tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195061, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.308228] env[62627]: DEBUG oslo_vmware.api [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195058, 'name': ReconfigVM_Task, 'duration_secs': 0.728239} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.308512] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: 28e42d69-0c19-4e3c-ae49-9076fdc65aa8] Reconfigured VM instance instance-0000005c to attach disk [datastore2] 28e42d69-0c19-4e3c-ae49-9076fdc65aa8/28e42d69-0c19-4e3c-ae49-9076fdc65aa8.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1149.311141] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7901813e-9541-4a73-a4ad-a125471b6c1e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.317190] env[62627]: DEBUG oslo_vmware.api [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Waiting for the task: (returnval){ [ 1149.317190] env[62627]: value = "task-2195062" [ 1149.317190] env[62627]: _type = "Task" [ 1149.317190] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.330680] env[62627]: DEBUG oslo_vmware.api [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195062, 'name': Rename_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.336417] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fcd4685-1a84-4666-b933-0136a997121a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lock "e9d33611-e373-4303-937d-b43d893e51f5" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.470s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1149.402673] env[62627]: DEBUG oslo_vmware.api [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2195057, 'name': PowerOffVM_Task, 'duration_secs': 1.222932} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.402920] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1149.403194] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Updating instance 'abb14e3e-b521-4f21-bc94-6bbb66fbee57' progress to 17 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1149.633230] env[62627]: DEBUG oslo_concurrency.lockutils [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Acquiring lock "refresh_cache-367a83b9-79f8-43b2-91dd-40d13193fa40" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1149.633419] env[62627]: DEBUG oslo_concurrency.lockutils [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Acquired lock "refresh_cache-367a83b9-79f8-43b2-91dd-40d13193fa40" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1149.633754] env[62627]: DEBUG nova.network.neutron [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: 367a83b9-79f8-43b2-91dd-40d13193fa40] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1149.746707] env[62627]: DEBUG oslo_vmware.api [None req-b1c70210-0f0c-4d59-9eaf-fb6af369ecab tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195061, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.830179] env[62627]: DEBUG oslo_vmware.api [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195062, 'name': Rename_Task, 'duration_secs': 0.208763} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.830688] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: 28e42d69-0c19-4e3c-ae49-9076fdc65aa8] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1149.831154] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5570f6c3-a3cb-4591-b9ea-583989fb05b5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.840024] env[62627]: DEBUG oslo_vmware.api [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Waiting for the task: (returnval){ [ 1149.840024] env[62627]: value = "task-2195064" [ 1149.840024] env[62627]: _type = "Task" [ 1149.840024] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.848956] env[62627]: DEBUG oslo_vmware.api [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195064, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.912358] env[62627]: DEBUG nova.virt.hardware [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1149.912746] env[62627]: DEBUG nova.virt.hardware [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1149.912925] env[62627]: DEBUG nova.virt.hardware [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1149.913160] env[62627]: DEBUG nova.virt.hardware [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1149.913419] env[62627]: DEBUG nova.virt.hardware [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 
tempest-DeleteServersTestJSON-1791757005-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1149.913715] env[62627]: DEBUG nova.virt.hardware [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1149.914080] env[62627]: DEBUG nova.virt.hardware [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1149.914303] env[62627]: DEBUG nova.virt.hardware [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1149.914559] env[62627]: DEBUG nova.virt.hardware [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1149.915025] env[62627]: DEBUG nova.virt.hardware [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1149.915242] env[62627]: DEBUG nova.virt.hardware [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1149.920908] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b9e01396-a395-41d0-9b1d-0ed76814f579 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.939038] env[62627]: DEBUG oslo_vmware.api [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 1149.939038] env[62627]: value = "task-2195065" [ 1149.939038] env[62627]: _type = "Task" [ 1149.939038] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.949699] env[62627]: DEBUG oslo_vmware.api [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2195065, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.183266] env[62627]: DEBUG nova.network.neutron [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: 367a83b9-79f8-43b2-91dd-40d13193fa40] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1150.246318] env[62627]: DEBUG oslo_vmware.api [None req-b1c70210-0f0c-4d59-9eaf-fb6af369ecab tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195061, 'name': ReconfigVM_Task, 'duration_secs': 0.593314} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1150.247019] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b1c70210-0f0c-4d59-9eaf-fb6af369ecab tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Releasing lock "84b8381c-bdd4-462e-aa98-0cc972f00882" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1150.247256] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-b1c70210-0f0c-4d59-9eaf-fb6af369ecab tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Reconfigured VM to attach interface {{(pid=62627) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1150.353569] env[62627]: DEBUG oslo_vmware.api [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195064, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.356509] env[62627]: DEBUG nova.network.neutron [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: 367a83b9-79f8-43b2-91dd-40d13193fa40] Updating instance_info_cache with network_info: [{"id": "4534e6a9-471a-4703-aa7a-7aac3d20292f", "address": "fa:16:3e:55:aa:31", "network": {"id": "e4275dc3-df4b-49c1-9677-3c0fc9408d2e", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1014582117-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "711f8ab1a2d5466399f408870e412587", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e83154-c0d2-4d3d-b95e-3cd5ba336257", "external-id": "nsx-vlan-transportzone-771", "segmentation_id": 771, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4534e6a9-47", "ovs_interfaceid": "4534e6a9-471a-4703-aa7a-7aac3d20292f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1150.450028] env[62627]: DEBUG oslo_vmware.api [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2195065, 'name': ReconfigVM_Task, 'duration_secs': 0.250636} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1150.450447] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Updating instance 'abb14e3e-b521-4f21-bc94-6bbb66fbee57' progress to 33 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1150.537375] env[62627]: DEBUG nova.compute.manager [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Stashing vm_state: active {{(pid=62627) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1150.752643] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b1c70210-0f0c-4d59-9eaf-fb6af369ecab tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lock "interface-84b8381c-bdd4-462e-aa98-0cc972f00882-aa0a7b01-9a61-4e32-82cb-8f849bff6266" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.519s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1150.850726] env[62627]: DEBUG oslo_vmware.api [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195064, 'name': PowerOnVM_Task, 'duration_secs': 0.574154} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1150.851096] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: 28e42d69-0c19-4e3c-ae49-9076fdc65aa8] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1150.851357] env[62627]: INFO nova.compute.manager [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: 28e42d69-0c19-4e3c-ae49-9076fdc65aa8] Took 9.06 seconds to spawn the instance on the hypervisor. 
[ 1150.851583] env[62627]: DEBUG nova.compute.manager [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: 28e42d69-0c19-4e3c-ae49-9076fdc65aa8] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1150.852437] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10d19dd0-ffb9-4d0a-8a04-33c5d8318926 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.859216] env[62627]: DEBUG oslo_concurrency.lockutils [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Releasing lock "refresh_cache-367a83b9-79f8-43b2-91dd-40d13193fa40" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1150.860051] env[62627]: DEBUG nova.compute.manager [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: 367a83b9-79f8-43b2-91dd-40d13193fa40] Instance network_info: |[{"id": "4534e6a9-471a-4703-aa7a-7aac3d20292f", "address": "fa:16:3e:55:aa:31", "network": {"id": "e4275dc3-df4b-49c1-9677-3c0fc9408d2e", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1014582117-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "711f8ab1a2d5466399f408870e412587", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e83154-c0d2-4d3d-b95e-3cd5ba336257", "external-id": "nsx-vlan-transportzone-771", "segmentation_id": 771, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4534e6a9-47", "ovs_interfaceid": "4534e6a9-471a-4703-aa7a-7aac3d20292f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1150.863778] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: 367a83b9-79f8-43b2-91dd-40d13193fa40] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:55:aa:31', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '13e83154-c0d2-4d3d-b95e-3cd5ba336257', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4534e6a9-471a-4703-aa7a-7aac3d20292f', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1150.871587] env[62627]: DEBUG oslo.service.loopingcall [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Waiting for function 
nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1150.872084] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 367a83b9-79f8-43b2-91dd-40d13193fa40] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1150.872412] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-db79540d-d0c5-45c6-b2a9-77fa6811f8e5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.894942] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1150.894942] env[62627]: value = "task-2195066" [ 1150.894942] env[62627]: _type = "Task" [ 1150.894942] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.905376] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195066, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.957128] env[62627]: DEBUG nova.virt.hardware [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1150.957426] env[62627]: DEBUG nova.virt.hardware [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1150.957667] env[62627]: DEBUG nova.virt.hardware [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1150.957929] env[62627]: DEBUG nova.virt.hardware [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1150.958138] env[62627]: DEBUG nova.virt.hardware [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1150.958348] env[62627]: DEBUG nova.virt.hardware [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Chose sockets=0, cores=0, threads=0; limits were 
sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1150.958659] env[62627]: DEBUG nova.virt.hardware [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1150.958824] env[62627]: DEBUG nova.virt.hardware [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1150.958990] env[62627]: DEBUG nova.virt.hardware [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1150.959269] env[62627]: DEBUG nova.virt.hardware [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1150.959452] env[62627]: DEBUG nova.virt.hardware [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1150.964629] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Reconfiguring VM instance instance-0000005a to detach disk 2000 {{(pid=62627) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1150.965282] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c1c7cdcf-9da5-46b1-ae71-de8a2cd75ab5 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Acquiring lock "457e5aa6-d229-4f48-8817-e918ec4bd0f9" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1150.966086] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c1c7cdcf-9da5-46b1-ae71-de8a2cd75ab5 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Lock "457e5aa6-d229-4f48-8817-e918ec4bd0f9" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1150.966876] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0de6d7bc-48da-4e58-a36a-a5354d40b5dc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.991458] env[62627]: DEBUG oslo_vmware.api [None 
req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 1150.991458] env[62627]: value = "task-2195067" [ 1150.991458] env[62627]: _type = "Task" [ 1150.991458] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.004028] env[62627]: DEBUG oslo_vmware.api [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2195067, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.060596] env[62627]: DEBUG oslo_concurrency.lockutils [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1151.060596] env[62627]: DEBUG oslo_concurrency.lockutils [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1151.198632] env[62627]: DEBUG nova.compute.manager [req-766ddd8d-79cd-4bc2-a18a-271c0d17961c req-d92f3d35-ec97-483f-9e4c-1c478898bc11 service nova] [instance: 367a83b9-79f8-43b2-91dd-40d13193fa40] Received event network-changed-4534e6a9-471a-4703-aa7a-7aac3d20292f {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1151.199240] env[62627]: DEBUG nova.compute.manager [req-766ddd8d-79cd-4bc2-a18a-271c0d17961c req-d92f3d35-ec97-483f-9e4c-1c478898bc11 service nova] [instance: 367a83b9-79f8-43b2-91dd-40d13193fa40] Refreshing instance network info cache due to event network-changed-4534e6a9-471a-4703-aa7a-7aac3d20292f. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1151.199240] env[62627]: DEBUG oslo_concurrency.lockutils [req-766ddd8d-79cd-4bc2-a18a-271c0d17961c req-d92f3d35-ec97-483f-9e4c-1c478898bc11 service nova] Acquiring lock "refresh_cache-367a83b9-79f8-43b2-91dd-40d13193fa40" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1151.199327] env[62627]: DEBUG oslo_concurrency.lockutils [req-766ddd8d-79cd-4bc2-a18a-271c0d17961c req-d92f3d35-ec97-483f-9e4c-1c478898bc11 service nova] Acquired lock "refresh_cache-367a83b9-79f8-43b2-91dd-40d13193fa40" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1151.199447] env[62627]: DEBUG nova.network.neutron [req-766ddd8d-79cd-4bc2-a18a-271c0d17961c req-d92f3d35-ec97-483f-9e4c-1c478898bc11 service nova] [instance: 367a83b9-79f8-43b2-91dd-40d13193fa40] Refreshing network info cache for port 4534e6a9-471a-4703-aa7a-7aac3d20292f {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1151.382192] env[62627]: INFO nova.compute.manager [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: 28e42d69-0c19-4e3c-ae49-9076fdc65aa8] Took 14.66 seconds to build instance. [ 1151.406223] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195066, 'name': CreateVM_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.482298] env[62627]: INFO nova.compute.manager [None req-c1c7cdcf-9da5-46b1-ae71-de8a2cd75ab5 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Detaching volume 05c71695-2066-4611-ae96-3ddb34d2b201 [ 1151.504446] env[62627]: DEBUG oslo_vmware.api [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2195067, 'name': ReconfigVM_Task, 'duration_secs': 0.216651} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.504814] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Reconfigured VM instance instance-0000005a to detach disk 2000 {{(pid=62627) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1151.505645] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77ef28a8-7f10-460d-b584-5f1ba46e80bc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.529669] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Reconfiguring VM instance instance-0000005a to attach disk [datastore2] abb14e3e-b521-4f21-bc94-6bbb66fbee57/abb14e3e-b521-4f21-bc94-6bbb66fbee57.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1151.530865] env[62627]: INFO nova.virt.block_device [None req-c1c7cdcf-9da5-46b1-ae71-de8a2cd75ab5 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Attempting to driver detach volume 05c71695-2066-4611-ae96-3ddb34d2b201 from mountpoint /dev/sdb [ 1151.531088] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1c7cdcf-9da5-46b1-ae71-de8a2cd75ab5 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Volume detach. 
Driver type: vmdk {{(pid=62627) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1151.531284] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1c7cdcf-9da5-46b1-ae71-de8a2cd75ab5 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447765', 'volume_id': '05c71695-2066-4611-ae96-3ddb34d2b201', 'name': 'volume-05c71695-2066-4611-ae96-3ddb34d2b201', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '457e5aa6-d229-4f48-8817-e918ec4bd0f9', 'attached_at': '', 'detached_at': '', 'volume_id': '05c71695-2066-4611-ae96-3ddb34d2b201', 'serial': '05c71695-2066-4611-ae96-3ddb34d2b201'} {{(pid=62627) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1151.531880] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5cbaf311-2aa2-4f53-85b1-9b09c584c6e6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.545016] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed558896-eb81-4026-a394-6a67518b8b97 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.569974] env[62627]: INFO nova.compute.claims [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1151.575141] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0605d0fc-0c0f-4ff6-8404-3a8de8db7ea7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.578121] env[62627]: DEBUG oslo_vmware.api [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 1151.578121] env[62627]: value = "task-2195068" [ 1151.578121] env[62627]: _type = "Task" [ 1151.578121] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.585197] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47d3e5d7-2e71-4a9f-97f2-540a8ffe87db {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.591056] env[62627]: DEBUG oslo_vmware.api [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2195068, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.611748] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28c7a0ba-2ae4-424f-ba9f-31c4d6694c53 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.627717] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1c7cdcf-9da5-46b1-ae71-de8a2cd75ab5 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] The volume has not been displaced from its original location: [datastore2] volume-05c71695-2066-4611-ae96-3ddb34d2b201/volume-05c71695-2066-4611-ae96-3ddb34d2b201.vmdk. No consolidation needed. {{(pid=62627) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1151.633021] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1c7cdcf-9da5-46b1-ae71-de8a2cd75ab5 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Reconfiguring VM instance instance-00000033 to detach disk 2001 {{(pid=62627) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1151.633364] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0a4530ed-ba9e-4749-beb7-aaa3af23d164 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.653272] env[62627]: DEBUG oslo_vmware.api [None req-c1c7cdcf-9da5-46b1-ae71-de8a2cd75ab5 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for the task: (returnval){ [ 1151.653272] env[62627]: value = "task-2195069" [ 1151.653272] env[62627]: _type = "Task" [ 1151.653272] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.662420] env[62627]: DEBUG oslo_vmware.api [None req-c1c7cdcf-9da5-46b1-ae71-de8a2cd75ab5 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2195069, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.675396] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a3d83ed9-2251-4adf-be7e-c2086b039478 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Acquiring lock "28e42d69-0c19-4e3c-ae49-9076fdc65aa8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1151.886487] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6d0287b8-3a63-43bf-9e20-f85b0d12c9c5 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Lock "28e42d69-0c19-4e3c-ae49-9076fdc65aa8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.169s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1151.886906] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a3d83ed9-2251-4adf-be7e-c2086b039478 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Lock "28e42d69-0c19-4e3c-ae49-9076fdc65aa8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.211s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1151.886906] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a3d83ed9-2251-4adf-be7e-c2086b039478 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Acquiring lock "28e42d69-0c19-4e3c-ae49-9076fdc65aa8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1151.887251] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a3d83ed9-2251-4adf-be7e-c2086b039478 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Lock "28e42d69-0c19-4e3c-ae49-9076fdc65aa8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1151.887456] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a3d83ed9-2251-4adf-be7e-c2086b039478 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Lock "28e42d69-0c19-4e3c-ae49-9076fdc65aa8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1151.890052] env[62627]: INFO nova.compute.manager [None req-a3d83ed9-2251-4adf-be7e-c2086b039478 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: 28e42d69-0c19-4e3c-ae49-9076fdc65aa8] Terminating instance [ 1151.905877] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195066, 'name': CreateVM_Task, 'duration_secs': 0.80256} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.906981] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 367a83b9-79f8-43b2-91dd-40d13193fa40] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1151.907835] env[62627]: DEBUG oslo_concurrency.lockutils [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1151.907979] env[62627]: DEBUG oslo_concurrency.lockutils [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1151.908386] env[62627]: DEBUG oslo_concurrency.lockutils [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1151.911512] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9131bb38-aabc-4a7b-abf5-95a77bc61978 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.917684] env[62627]: DEBUG oslo_vmware.api [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Waiting for the task: (returnval){ [ 1151.917684] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52c62bcd-9358-0c0f-acf9-12ca2687e5ad" [ 1151.917684] env[62627]: _type = "Task" [ 1151.917684] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.927882] env[62627]: DEBUG oslo_vmware.api [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52c62bcd-9358-0c0f-acf9-12ca2687e5ad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.026291] env[62627]: DEBUG nova.network.neutron [req-766ddd8d-79cd-4bc2-a18a-271c0d17961c req-d92f3d35-ec97-483f-9e4c-1c478898bc11 service nova] [instance: 367a83b9-79f8-43b2-91dd-40d13193fa40] Updated VIF entry in instance network info cache for port 4534e6a9-471a-4703-aa7a-7aac3d20292f. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1152.026722] env[62627]: DEBUG nova.network.neutron [req-766ddd8d-79cd-4bc2-a18a-271c0d17961c req-d92f3d35-ec97-483f-9e4c-1c478898bc11 service nova] [instance: 367a83b9-79f8-43b2-91dd-40d13193fa40] Updating instance_info_cache with network_info: [{"id": "4534e6a9-471a-4703-aa7a-7aac3d20292f", "address": "fa:16:3e:55:aa:31", "network": {"id": "e4275dc3-df4b-49c1-9677-3c0fc9408d2e", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1014582117-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "711f8ab1a2d5466399f408870e412587", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e83154-c0d2-4d3d-b95e-3cd5ba336257", "external-id": "nsx-vlan-transportzone-771", "segmentation_id": 771, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4534e6a9-47", "ovs_interfaceid": "4534e6a9-471a-4703-aa7a-7aac3d20292f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1152.080796] env[62627]: INFO nova.compute.resource_tracker [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Updating resource usage from migration 06213fc3-8b9e-48ce-841a-c124a8ef7acb [ 1152.093787] env[62627]: DEBUG oslo_vmware.api [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2195068, 'name': ReconfigVM_Task, 'duration_secs': 0.337415} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.094086] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Reconfigured VM instance instance-0000005a to attach disk [datastore2] abb14e3e-b521-4f21-bc94-6bbb66fbee57/abb14e3e-b521-4f21-bc94-6bbb66fbee57.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1152.094401] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Updating instance 'abb14e3e-b521-4f21-bc94-6bbb66fbee57' progress to 50 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1152.165914] env[62627]: DEBUG oslo_vmware.api [None req-c1c7cdcf-9da5-46b1-ae71-de8a2cd75ab5 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2195069, 'name': ReconfigVM_Task, 'duration_secs': 0.323301} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.166186] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1c7cdcf-9da5-46b1-ae71-de8a2cd75ab5 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Reconfigured VM instance instance-00000033 to detach disk 2001 {{(pid=62627) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1152.171419] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1ae186eb-4317-4917-af4f-088ec5a122fb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.191802] env[62627]: DEBUG oslo_vmware.api [None req-c1c7cdcf-9da5-46b1-ae71-de8a2cd75ab5 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for the task: (returnval){ [ 1152.191802] env[62627]: value = "task-2195071" [ 1152.191802] env[62627]: _type = "Task" [ 1152.191802] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.205294] env[62627]: DEBUG oslo_vmware.api [None req-c1c7cdcf-9da5-46b1-ae71-de8a2cd75ab5 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2195071, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.319879] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e99cc194-88c9-4c86-abb1-bc12c42f8a7e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.330499] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b13eb613-ea34-4d4f-a538-e7e3626bbd47 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.367801] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07b813df-f0d5-47fa-9d61-d8d5b55c074b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.379776] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcb5c617-6111-471c-bf44-f1e2573192aa {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.395395] env[62627]: DEBUG nova.compute.provider_tree [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1152.400629] env[62627]: DEBUG nova.compute.manager [None req-a3d83ed9-2251-4adf-be7e-c2086b039478 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: 28e42d69-0c19-4e3c-ae49-9076fdc65aa8] Start destroying the instance on the hypervisor. 
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1152.400827] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-a3d83ed9-2251-4adf-be7e-c2086b039478 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: 28e42d69-0c19-4e3c-ae49-9076fdc65aa8] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1152.401679] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be99ecfc-48e4-45b7-9003-da9f0f901001 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.410249] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3d83ed9-2251-4adf-be7e-c2086b039478 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: 28e42d69-0c19-4e3c-ae49-9076fdc65aa8] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1152.410508] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-89f248a6-62aa-4d6b-9f33-6a3a9c554e4e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.419739] env[62627]: DEBUG oslo_vmware.api [None req-a3d83ed9-2251-4adf-be7e-c2086b039478 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Waiting for the task: (returnval){ [ 1152.419739] env[62627]: value = "task-2195072" [ 1152.419739] env[62627]: _type = "Task" [ 1152.419739] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.431849] env[62627]: DEBUG oslo_vmware.api [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52c62bcd-9358-0c0f-acf9-12ca2687e5ad, 'name': SearchDatastore_Task, 'duration_secs': 0.014927} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.435024] env[62627]: DEBUG oslo_concurrency.lockutils [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1152.435278] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: 367a83b9-79f8-43b2-91dd-40d13193fa40] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1152.435514] env[62627]: DEBUG oslo_concurrency.lockutils [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1152.435661] env[62627]: DEBUG oslo_concurrency.lockutils [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1152.435836] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1152.436110] env[62627]: DEBUG oslo_vmware.api [None req-a3d83ed9-2251-4adf-be7e-c2086b039478 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195072, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.436338] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9c3518f3-2358-434f-8397-2f275e4beb0e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.446166] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1152.446335] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1152.447119] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-68331a54-142a-456d-9369-d578e4982a34 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.454076] env[62627]: DEBUG oslo_vmware.api [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Waiting for the task: (returnval){ [ 1152.454076] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52e2fc48-d26b-db53-4df2-5c65aac9a32c" [ 1152.454076] env[62627]: _type = "Task" [ 1152.454076] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.462546] env[62627]: DEBUG oslo_vmware.api [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52e2fc48-d26b-db53-4df2-5c65aac9a32c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.530383] env[62627]: DEBUG oslo_concurrency.lockutils [req-766ddd8d-79cd-4bc2-a18a-271c0d17961c req-d92f3d35-ec97-483f-9e4c-1c478898bc11 service nova] Releasing lock "refresh_cache-367a83b9-79f8-43b2-91dd-40d13193fa40" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1152.601789] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e46c5ae7-9b13-49b9-a1aa-e0abeb63a4b0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.621343] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7edc959-a3e6-4ee6-8214-9118583ed480 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.640721] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Updating instance 'abb14e3e-b521-4f21-bc94-6bbb66fbee57' progress to 67 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1152.702087] env[62627]: DEBUG oslo_vmware.api [None req-c1c7cdcf-9da5-46b1-ae71-de8a2cd75ab5 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2195071, 'name': ReconfigVM_Task, 'duration_secs': 0.181213} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.702413] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1c7cdcf-9da5-46b1-ae71-de8a2cd75ab5 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447765', 'volume_id': '05c71695-2066-4611-ae96-3ddb34d2b201', 'name': 'volume-05c71695-2066-4611-ae96-3ddb34d2b201', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '457e5aa6-d229-4f48-8817-e918ec4bd0f9', 'attached_at': '', 'detached_at': '', 'volume_id': '05c71695-2066-4611-ae96-3ddb34d2b201', 'serial': '05c71695-2066-4611-ae96-3ddb34d2b201'} {{(pid=62627) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1152.839394] env[62627]: DEBUG oslo_concurrency.lockutils [None req-203f628b-0e72-4020-84e1-97edea347939 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquiring lock "interface-84b8381c-bdd4-462e-aa98-0cc972f00882-b8c79070-6650-4427-ab59-c910dccfba54" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1152.839859] env[62627]: DEBUG oslo_concurrency.lockutils [None req-203f628b-0e72-4020-84e1-97edea347939 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lock "interface-84b8381c-bdd4-462e-aa98-0cc972f00882-b8c79070-6650-4427-ab59-c910dccfba54" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1152.900993] env[62627]: DEBUG nova.scheduler.client.report [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1152.936438] env[62627]: DEBUG oslo_vmware.api [None req-a3d83ed9-2251-4adf-be7e-c2086b039478 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195072, 'name': PowerOffVM_Task, 'duration_secs': 0.367954} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.936708] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3d83ed9-2251-4adf-be7e-c2086b039478 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: 28e42d69-0c19-4e3c-ae49-9076fdc65aa8] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1152.936879] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-a3d83ed9-2251-4adf-be7e-c2086b039478 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: 28e42d69-0c19-4e3c-ae49-9076fdc65aa8] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1152.937204] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5a2503b5-cb72-48eb-abc5-2b4ccb2519c8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.964304] env[62627]: DEBUG oslo_vmware.api [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52e2fc48-d26b-db53-4df2-5c65aac9a32c, 'name': SearchDatastore_Task, 'duration_secs': 0.026154} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.965235] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7897a615-4a05-48b6-9911-b0e584765277 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.971036] env[62627]: DEBUG oslo_vmware.api [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Waiting for the task: (returnval){ [ 1152.971036] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5256551d-eaae-d9cb-e078-049b93c940b8" [ 1152.971036] env[62627]: _type = "Task" [ 1152.971036] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.979517] env[62627]: DEBUG oslo_vmware.api [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5256551d-eaae-d9cb-e078-049b93c940b8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.008919] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-a3d83ed9-2251-4adf-be7e-c2086b039478 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: 28e42d69-0c19-4e3c-ae49-9076fdc65aa8] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1153.009235] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-a3d83ed9-2251-4adf-be7e-c2086b039478 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: 28e42d69-0c19-4e3c-ae49-9076fdc65aa8] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1153.009526] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3d83ed9-2251-4adf-be7e-c2086b039478 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Deleting the datastore file [datastore2] 28e42d69-0c19-4e3c-ae49-9076fdc65aa8 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1153.009822] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ec74040c-c170-4d85-a34b-24ff7ed9c0e6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.019080] env[62627]: DEBUG oslo_vmware.api [None req-a3d83ed9-2251-4adf-be7e-c2086b039478 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Waiting for the task: (returnval){ [ 1153.019080] env[62627]: value = "task-2195074" [ 1153.019080] env[62627]: _type = "Task" [ 1153.019080] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.027393] env[62627]: DEBUG oslo_vmware.api [None req-a3d83ed9-2251-4adf-be7e-c2086b039478 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195074, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.186227] env[62627]: DEBUG nova.network.neutron [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Port 9297477b-3156-43b7-b7ca-4c2d76609f24 binding to destination host cpu-1 is already ACTIVE {{(pid=62627) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1153.245204] env[62627]: DEBUG nova.objects.instance [None req-c1c7cdcf-9da5-46b1-ae71-de8a2cd75ab5 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Lazy-loading 'flavor' on Instance uuid 457e5aa6-d229-4f48-8817-e918ec4bd0f9 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1153.342123] env[62627]: DEBUG oslo_concurrency.lockutils [None req-203f628b-0e72-4020-84e1-97edea347939 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquiring lock "84b8381c-bdd4-462e-aa98-0cc972f00882" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1153.342332] env[62627]: DEBUG oslo_concurrency.lockutils [None req-203f628b-0e72-4020-84e1-97edea347939 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquired lock "84b8381c-bdd4-462e-aa98-0cc972f00882" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1153.343234] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e6fbace-bb42-486f-b0f3-9258eca19018 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.363929] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9890149-a33b-440c-b802-578d447678bf {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.393911] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-203f628b-0e72-4020-84e1-97edea347939 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Reconfiguring VM to detach interface {{(pid=62627) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1153.394803] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0597ed76-5941-41d6-a39b-b9a4edb96800 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.408492] env[62627]: DEBUG oslo_concurrency.lockutils [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.349s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1153.408695] env[62627]: INFO nova.compute.manager [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Migrating [ 1153.425064] env[62627]: DEBUG 
oslo_vmware.api [None req-203f628b-0e72-4020-84e1-97edea347939 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Waiting for the task: (returnval){ [ 1153.425064] env[62627]: value = "task-2195075" [ 1153.425064] env[62627]: _type = "Task" [ 1153.425064] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.441113] env[62627]: DEBUG oslo_vmware.api [None req-203f628b-0e72-4020-84e1-97edea347939 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195075, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.484520] env[62627]: DEBUG oslo_vmware.api [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5256551d-eaae-d9cb-e078-049b93c940b8, 'name': SearchDatastore_Task, 'duration_secs': 0.018317} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.484882] env[62627]: DEBUG oslo_concurrency.lockutils [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1153.485379] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore1] 367a83b9-79f8-43b2-91dd-40d13193fa40/367a83b9-79f8-43b2-91dd-40d13193fa40.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1153.485617] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ce41aa8c-9890-4449-b9f7-7c87f93515f1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.497031] env[62627]: DEBUG oslo_vmware.api [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Waiting for the task: (returnval){ [ 1153.497031] env[62627]: value = "task-2195076" [ 1153.497031] env[62627]: _type = "Task" [ 1153.497031] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.506282] env[62627]: DEBUG oslo_vmware.api [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Task: {'id': task-2195076, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.529128] env[62627]: DEBUG oslo_vmware.api [None req-a3d83ed9-2251-4adf-be7e-c2086b039478 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195074, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.926706] env[62627]: DEBUG oslo_concurrency.lockutils [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquiring lock "refresh_cache-634d547b-3b5e-4185-8229-daf055ca6e69" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1153.926706] env[62627]: DEBUG oslo_concurrency.lockutils [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquired lock "refresh_cache-634d547b-3b5e-4185-8229-daf055ca6e69" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1153.926706] env[62627]: DEBUG nova.network.neutron [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1153.938750] env[62627]: DEBUG oslo_vmware.api [None req-203f628b-0e72-4020-84e1-97edea347939 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195075, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.009604] env[62627]: DEBUG oslo_vmware.api [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Task: {'id': task-2195076, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.031586] env[62627]: DEBUG oslo_vmware.api [None req-a3d83ed9-2251-4adf-be7e-c2086b039478 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195074, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.564618} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.031814] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3d83ed9-2251-4adf-be7e-c2086b039478 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1154.032024] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-a3d83ed9-2251-4adf-be7e-c2086b039478 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: 28e42d69-0c19-4e3c-ae49-9076fdc65aa8] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1154.032227] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-a3d83ed9-2251-4adf-be7e-c2086b039478 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: 28e42d69-0c19-4e3c-ae49-9076fdc65aa8] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1154.032408] env[62627]: INFO nova.compute.manager [None req-a3d83ed9-2251-4adf-be7e-c2086b039478 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: 28e42d69-0c19-4e3c-ae49-9076fdc65aa8] Took 1.63 seconds to destroy the instance on the hypervisor. [ 1154.032657] env[62627]: DEBUG oslo.service.loopingcall [None req-a3d83ed9-2251-4adf-be7e-c2086b039478 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1154.033136] env[62627]: DEBUG nova.compute.manager [-] [instance: 28e42d69-0c19-4e3c-ae49-9076fdc65aa8] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1154.033260] env[62627]: DEBUG nova.network.neutron [-] [instance: 28e42d69-0c19-4e3c-ae49-9076fdc65aa8] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1154.218250] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquiring lock "abb14e3e-b521-4f21-bc94-6bbb66fbee57-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1154.218250] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "abb14e3e-b521-4f21-bc94-6bbb66fbee57-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1154.218250] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "abb14e3e-b521-4f21-bc94-6bbb66fbee57-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1154.252802] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c1c7cdcf-9da5-46b1-ae71-de8a2cd75ab5 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Lock "457e5aa6-d229-4f48-8817-e918ec4bd0f9" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.287s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1154.396335] env[62627]: DEBUG nova.compute.manager [req-10662946-03b4-4faf-b8ba-9a95a72add0f req-6aee30f2-4c31-492c-bf7d-9081a21455d5 service nova] [instance: 28e42d69-0c19-4e3c-ae49-9076fdc65aa8] Received event network-vif-deleted-37591ab2-0826-44b0-99ae-03059115c799 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1154.396572] env[62627]: INFO nova.compute.manager [req-10662946-03b4-4faf-b8ba-9a95a72add0f req-6aee30f2-4c31-492c-bf7d-9081a21455d5 service nova] [instance: 28e42d69-0c19-4e3c-ae49-9076fdc65aa8] Neutron deleted interface 37591ab2-0826-44b0-99ae-03059115c799; detaching it from the instance and deleting it from the info cache [ 1154.396712] env[62627]: DEBUG nova.network.neutron [req-10662946-03b4-4faf-b8ba-9a95a72add0f req-6aee30f2-4c31-492c-bf7d-9081a21455d5 service nova] [instance: 28e42d69-0c19-4e3c-ae49-9076fdc65aa8] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1154.441080] env[62627]: DEBUG oslo_vmware.api [None req-203f628b-0e72-4020-84e1-97edea347939 
tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195075, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.510215] env[62627]: DEBUG oslo_vmware.api [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Task: {'id': task-2195076, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.790762} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.510477] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore1] 367a83b9-79f8-43b2-91dd-40d13193fa40/367a83b9-79f8-43b2-91dd-40d13193fa40.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1154.510700] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: 367a83b9-79f8-43b2-91dd-40d13193fa40] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1154.513895] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d5b20301-85d0-4cf6-92b4-954264d1c795 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.522900] env[62627]: DEBUG oslo_vmware.api [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Waiting for the task: (returnval){ [ 1154.522900] env[62627]: value = "task-2195078" [ 1154.522900] env[62627]: _type = "Task" [ 1154.522900] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.533567] env[62627]: DEBUG oslo_vmware.api [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Task: {'id': task-2195078, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.673854] env[62627]: DEBUG nova.network.neutron [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Updating instance_info_cache with network_info: [{"id": "9830a1ce-942e-4954-8513-f624e13d304c", "address": "fa:16:3e:bb:02:dc", "network": {"id": "f5de376b-cb15-4145-90b2-664ca4fa638e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1451402594-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74aed70194cf444884768a5c89fe8613", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "098df9b7-d759-47f7-b756-334848cb423b", "external-id": "nsx-vlan-transportzone-765", "segmentation_id": 765, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9830a1ce-94", "ovs_interfaceid": "9830a1ce-942e-4954-8513-f624e13d304c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1154.873214] env[62627]: DEBUG nova.network.neutron [-] [instance: 28e42d69-0c19-4e3c-ae49-9076fdc65aa8] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1154.899613] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3a9c0c57-a39f-441a-af9d-6ccfdadca969 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.910937] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdcbb483-8322-4feb-b5f2-434cb423fe43 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.953030] env[62627]: DEBUG nova.compute.manager [req-10662946-03b4-4faf-b8ba-9a95a72add0f req-6aee30f2-4c31-492c-bf7d-9081a21455d5 service nova] [instance: 28e42d69-0c19-4e3c-ae49-9076fdc65aa8] Detach interface failed, port_id=37591ab2-0826-44b0-99ae-03059115c799, reason: Instance 28e42d69-0c19-4e3c-ae49-9076fdc65aa8 could not be found. {{(pid=62627) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1154.959810] env[62627]: DEBUG oslo_vmware.api [None req-203f628b-0e72-4020-84e1-97edea347939 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195075, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.035082] env[62627]: DEBUG oslo_vmware.api [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Task: {'id': task-2195078, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.17312} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.035380] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: 367a83b9-79f8-43b2-91dd-40d13193fa40] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1155.036214] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdffcd88-440a-4ec6-b3bf-43e3d17da77d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.059062] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: 367a83b9-79f8-43b2-91dd-40d13193fa40] Reconfiguring VM instance instance-0000005d to attach disk [datastore1] 367a83b9-79f8-43b2-91dd-40d13193fa40/367a83b9-79f8-43b2-91dd-40d13193fa40.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1155.059382] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cf7ef436-f688-45b9-8ff3-125290f8a328 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.079952] env[62627]: DEBUG oslo_vmware.api [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Waiting for the task: (returnval){ [ 1155.079952] env[62627]: value = "task-2195079" [ 1155.079952] env[62627]: _type = "Task" [ 1155.079952] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.088663] env[62627]: DEBUG oslo_vmware.api [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Task: {'id': task-2195079, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.177233] env[62627]: DEBUG oslo_concurrency.lockutils [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Releasing lock "refresh_cache-634d547b-3b5e-4185-8229-daf055ca6e69" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1155.298221] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquiring lock "refresh_cache-abb14e3e-b521-4f21-bc94-6bbb66fbee57" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1155.298565] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquired lock "refresh_cache-abb14e3e-b521-4f21-bc94-6bbb66fbee57" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1155.298880] env[62627]: DEBUG nova.network.neutron [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1155.329618] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c33b2cac-d408-4e3e-b98d-a8712c825232 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Acquiring lock "457e5aa6-d229-4f48-8817-e918ec4bd0f9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1155.329618] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c33b2cac-d408-4e3e-b98d-a8712c825232 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Lock "457e5aa6-d229-4f48-8817-e918ec4bd0f9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1155.329618] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c33b2cac-d408-4e3e-b98d-a8712c825232 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Acquiring lock "457e5aa6-d229-4f48-8817-e918ec4bd0f9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1155.329618] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c33b2cac-d408-4e3e-b98d-a8712c825232 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Lock "457e5aa6-d229-4f48-8817-e918ec4bd0f9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1155.329618] env[62627]: DEBUG 
oslo_concurrency.lockutils [None req-c33b2cac-d408-4e3e-b98d-a8712c825232 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Lock "457e5aa6-d229-4f48-8817-e918ec4bd0f9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1155.332290] env[62627]: INFO nova.compute.manager [None req-c33b2cac-d408-4e3e-b98d-a8712c825232 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Terminating instance [ 1155.378108] env[62627]: INFO nova.compute.manager [-] [instance: 28e42d69-0c19-4e3c-ae49-9076fdc65aa8] Took 1.34 seconds to deallocate network for instance. [ 1155.440188] env[62627]: DEBUG oslo_vmware.api [None req-203f628b-0e72-4020-84e1-97edea347939 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195075, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.594558] env[62627]: DEBUG oslo_vmware.api [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Task: {'id': task-2195079, 'name': ReconfigVM_Task, 'duration_secs': 0.342122} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.595016] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: 367a83b9-79f8-43b2-91dd-40d13193fa40] Reconfigured VM instance instance-0000005d to attach disk [datastore1] 367a83b9-79f8-43b2-91dd-40d13193fa40/367a83b9-79f8-43b2-91dd-40d13193fa40.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1155.595494] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Creating Virtual Disk of size 1048576 KB and adapter type paraVirtual on the data store {{(pid=62627) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1383}} [ 1155.596387] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CreateVirtualDisk_Task with opID=oslo.vmware-18fcb534-20ea-464e-b2dd-ef1f2ced7465 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.606105] env[62627]: DEBUG oslo_vmware.api [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Waiting for the task: (returnval){ [ 1155.606105] env[62627]: value = "task-2195080" [ 1155.606105] env[62627]: _type = "Task" [ 1155.606105] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.615206] env[62627]: DEBUG oslo_vmware.api [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Task: {'id': task-2195080, 'name': CreateVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.837757] env[62627]: DEBUG nova.compute.manager [None req-c33b2cac-d408-4e3e-b98d-a8712c825232 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Start destroying the instance on the hypervisor. {{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1155.838035] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c33b2cac-d408-4e3e-b98d-a8712c825232 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1155.838922] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33093b05-5f4a-4be8-b99e-718d8d48aa05 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.846919] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-c33b2cac-d408-4e3e-b98d-a8712c825232 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1155.847242] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-26fad841-8dbc-4a0a-a0d7-a3f24548e497 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.853848] env[62627]: DEBUG oslo_vmware.api [None req-c33b2cac-d408-4e3e-b98d-a8712c825232 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for the task: (returnval){ [ 1155.853848] env[62627]: value = "task-2195081" [ 1155.853848] env[62627]: _type = "Task" [ 1155.853848] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.862666] env[62627]: DEBUG oslo_vmware.api [None req-c33b2cac-d408-4e3e-b98d-a8712c825232 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2195081, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.887275] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a3d83ed9-2251-4adf-be7e-c2086b039478 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1155.887533] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a3d83ed9-2251-4adf-be7e-c2086b039478 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1155.887787] env[62627]: DEBUG nova.objects.instance [None req-a3d83ed9-2251-4adf-be7e-c2086b039478 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Lazy-loading 'resources' on Instance uuid 28e42d69-0c19-4e3c-ae49-9076fdc65aa8 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1155.939503] env[62627]: DEBUG oslo_vmware.api [None req-203f628b-0e72-4020-84e1-97edea347939 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195075, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.120321] env[62627]: DEBUG oslo_vmware.api [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Task: {'id': task-2195080, 'name': CreateVirtualDisk_Task, 'duration_secs': 0.059102} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.120321] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Created Virtual Disk of size 1048576 KB and type thin {{(pid=62627) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1404}} [ 1156.120321] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a857b9c-48f7-4452-b147-6e18a1b31b74 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.154107] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: 367a83b9-79f8-43b2-91dd-40d13193fa40] Reconfiguring VM instance instance-0000005d to attach disk [datastore1] 367a83b9-79f8-43b2-91dd-40d13193fa40/ephemeral_0.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1156.154651] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a33d6b2f-f73e-4980-91d1-d861ac7265d2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.176115] env[62627]: DEBUG oslo_vmware.api [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Waiting for the task: (returnval){ [ 1156.176115] env[62627]: value = "task-2195082" [ 1156.176115] env[62627]: _type = "Task" [ 1156.176115] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.188777] env[62627]: DEBUG oslo_vmware.api [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Task: {'id': task-2195082, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.247040] env[62627]: DEBUG nova.network.neutron [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Updating instance_info_cache with network_info: [{"id": "9297477b-3156-43b7-b7ca-4c2d76609f24", "address": "fa:16:3e:93:95:7a", "network": {"id": "7c6d5a35-da58-4a44-83d0-20feb994e5ab", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-709440062-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c9263b20f714274a55b88a8bdbd5251", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "99be9a5e-b3f9-4e6c-83d5-df11f817847d", "external-id": "nsx-vlan-transportzone-566", "segmentation_id": 566, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9297477b-31", "ovs_interfaceid": "9297477b-3156-43b7-b7ca-4c2d76609f24", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1156.365456] env[62627]: DEBUG oslo_vmware.api [None req-c33b2cac-d408-4e3e-b98d-a8712c825232 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2195081, 'name': PowerOffVM_Task, 'duration_secs': 0.246601} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.365671] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-c33b2cac-d408-4e3e-b98d-a8712c825232 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1156.365961] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c33b2cac-d408-4e3e-b98d-a8712c825232 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1156.366163] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-75529520-1235-4273-ab6f-a868e3c49333 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.441396] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c33b2cac-d408-4e3e-b98d-a8712c825232 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1156.441846] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c33b2cac-d408-4e3e-b98d-a8712c825232 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1156.441993] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-c33b2cac-d408-4e3e-b98d-a8712c825232 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Deleting the datastore file [datastore2] 457e5aa6-d229-4f48-8817-e918ec4bd0f9 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1156.445359] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f2a3e142-9365-4921-9f69-1aaf4da05214 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.447574] env[62627]: DEBUG oslo_vmware.api [None req-203f628b-0e72-4020-84e1-97edea347939 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195075, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.454015] env[62627]: DEBUG oslo_vmware.api [None req-c33b2cac-d408-4e3e-b98d-a8712c825232 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for the task: (returnval){ [ 1156.454015] env[62627]: value = "task-2195084" [ 1156.454015] env[62627]: _type = "Task" [ 1156.454015] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.462567] env[62627]: DEBUG oslo_vmware.api [None req-c33b2cac-d408-4e3e-b98d-a8712c825232 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2195084, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.619074] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4703cea-107f-49a7-b0ca-cc91ac6e2dad {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.628566] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf36b4d2-cedd-4d8c-9561-42e9917593af {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.660364] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2666351-184f-4120-b0a5-9c4882dc4e65 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.689490] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ed61dae-84ce-4ec5-b588-cf9414200be9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.693547] env[62627]: DEBUG oslo_vmware.api [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Task: {'id': task-2195082, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.698195] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d181b453-b923-4566-88ad-f684e47980d6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.710141] env[62627]: DEBUG nova.compute.provider_tree [None req-a3d83ed9-2251-4adf-be7e-c2086b039478 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1156.728688] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Updating instance '634d547b-3b5e-4185-8229-daf055ca6e69' progress to 0 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1156.749790] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Releasing lock "refresh_cache-abb14e3e-b521-4f21-bc94-6bbb66fbee57" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1156.941682] env[62627]: DEBUG oslo_vmware.api [None req-203f628b-0e72-4020-84e1-97edea347939 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195075, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.963981] env[62627]: DEBUG oslo_vmware.api [None req-c33b2cac-d408-4e3e-b98d-a8712c825232 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2195084, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.446013} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.964350] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-c33b2cac-d408-4e3e-b98d-a8712c825232 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1156.964541] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c33b2cac-d408-4e3e-b98d-a8712c825232 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1156.964715] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c33b2cac-d408-4e3e-b98d-a8712c825232 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1156.964886] env[62627]: INFO nova.compute.manager [None req-c33b2cac-d408-4e3e-b98d-a8712c825232 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1156.965189] env[62627]: DEBUG oslo.service.loopingcall [None req-c33b2cac-d408-4e3e-b98d-a8712c825232 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1156.965395] env[62627]: DEBUG nova.compute.manager [-] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1156.965490] env[62627]: DEBUG nova.network.neutron [-] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1157.187354] env[62627]: DEBUG oslo_vmware.api [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Task: {'id': task-2195082, 'name': ReconfigVM_Task, 'duration_secs': 0.523942} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.187749] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: 367a83b9-79f8-43b2-91dd-40d13193fa40] Reconfigured VM instance instance-0000005d to attach disk [datastore1] 367a83b9-79f8-43b2-91dd-40d13193fa40/ephemeral_0.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1157.188361] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fa7a9644-897a-4d59-ad7f-7b4b21bc2bd1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.196633] env[62627]: DEBUG oslo_vmware.api [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Waiting for the task: (returnval){ [ 1157.196633] env[62627]: value = "task-2195085" [ 1157.196633] env[62627]: _type = "Task" [ 1157.196633] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.208195] env[62627]: DEBUG oslo_vmware.api [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Task: {'id': task-2195085, 'name': Rename_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.214558] env[62627]: DEBUG nova.scheduler.client.report [None req-a3d83ed9-2251-4adf-be7e-c2086b039478 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1157.237675] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1157.237962] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-699fcb02-45e6-44f2-bbf7-313b5874d1ec {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.247303] env[62627]: DEBUG oslo_vmware.api [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1157.247303] env[62627]: value = "task-2195086" [ 1157.247303] env[62627]: _type = "Task" [ 1157.247303] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.261187] env[62627]: DEBUG oslo_vmware.api [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195086, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.278509] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb60972a-320b-45d1-bd41-efd1b1f75783 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.301050] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cc63715-f62d-4903-aa5b-b05363c7a008 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.309616] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Updating instance 'abb14e3e-b521-4f21-bc94-6bbb66fbee57' progress to 83 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1157.445637] env[62627]: DEBUG oslo_vmware.api [None req-203f628b-0e72-4020-84e1-97edea347939 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195075, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.503706] env[62627]: DEBUG nova.compute.manager [req-ae6140f1-f42f-460c-bba4-102674fa97d1 req-f2398cd4-b0e2-4c26-938c-148040c3405a service nova] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Received event network-vif-deleted-62b171c6-c991-4816-86e3-ba8a0862f911 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1157.503956] env[62627]: INFO nova.compute.manager [req-ae6140f1-f42f-460c-bba4-102674fa97d1 req-f2398cd4-b0e2-4c26-938c-148040c3405a service nova] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Neutron deleted interface 62b171c6-c991-4816-86e3-ba8a0862f911; detaching it from the instance and deleting it from the info cache [ 1157.504162] env[62627]: DEBUG nova.network.neutron [req-ae6140f1-f42f-460c-bba4-102674fa97d1 req-f2398cd4-b0e2-4c26-938c-148040c3405a service nova] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1157.707769] env[62627]: DEBUG oslo_vmware.api [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Task: {'id': task-2195085, 'name': Rename_Task, 'duration_secs': 0.2287} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.708090] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: 367a83b9-79f8-43b2-91dd-40d13193fa40] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1157.708370] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3912b2a4-ea25-4596-8c6b-22d3fdc13cd0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.716605] env[62627]: DEBUG oslo_vmware.api [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Waiting for the task: (returnval){ [ 1157.716605] env[62627]: value = "task-2195087" [ 1157.716605] env[62627]: _type = "Task" [ 1157.716605] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.720331] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a3d83ed9-2251-4adf-be7e-c2086b039478 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.833s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1157.731384] env[62627]: DEBUG oslo_vmware.api [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Task: {'id': task-2195087, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.743772] env[62627]: INFO nova.scheduler.client.report [None req-a3d83ed9-2251-4adf-be7e-c2086b039478 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Deleted allocations for instance 28e42d69-0c19-4e3c-ae49-9076fdc65aa8 [ 1157.761202] env[62627]: DEBUG oslo_vmware.api [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195086, 'name': PowerOffVM_Task, 'duration_secs': 0.20715} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.761534] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1157.761733] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Updating instance '634d547b-3b5e-4185-8229-daf055ca6e69' progress to 17 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1157.816578] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1157.817814] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f86532e6-1b99-42a0-8cc8-8bdbe4bc6f2f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.827854] env[62627]: DEBUG oslo_vmware.api [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 1157.827854] env[62627]: value = "task-2195088" [ 1157.827854] env[62627]: _type = "Task" [ 1157.827854] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.838047] env[62627]: DEBUG oslo_vmware.api [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2195088, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.944235] env[62627]: DEBUG oslo_vmware.api [None req-203f628b-0e72-4020-84e1-97edea347939 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195075, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.984548] env[62627]: DEBUG nova.network.neutron [-] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1158.007270] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0befc4a8-5c2e-40cd-bc78-2ce79ff0e2a1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.020619] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22aa41df-7cbf-4dae-b17b-b7c8c0c4e368 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.061520] env[62627]: DEBUG nova.compute.manager [req-ae6140f1-f42f-460c-bba4-102674fa97d1 req-f2398cd4-b0e2-4c26-938c-148040c3405a service nova] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Detach interface failed, port_id=62b171c6-c991-4816-86e3-ba8a0862f911, reason: Instance 457e5aa6-d229-4f48-8817-e918ec4bd0f9 could not be found. {{(pid=62627) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1158.166810] env[62627]: DEBUG oslo_concurrency.lockutils [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Acquiring lock "c25fe185-2d6a-40a8-be06-84ee22a5464e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1158.167118] env[62627]: DEBUG oslo_concurrency.lockutils [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Lock "c25fe185-2d6a-40a8-be06-84ee22a5464e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1158.229778] env[62627]: DEBUG oslo_vmware.api [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Task: {'id': task-2195087, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.252771] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a3d83ed9-2251-4adf-be7e-c2086b039478 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Lock "28e42d69-0c19-4e3c-ae49-9076fdc65aa8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.366s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1158.274740] env[62627]: DEBUG nova.virt.hardware [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1158.275075] env[62627]: DEBUG nova.virt.hardware [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1158.275221] env[62627]: DEBUG nova.virt.hardware [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1158.275432] env[62627]: DEBUG nova.virt.hardware [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1158.275587] env[62627]: DEBUG nova.virt.hardware [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1158.275737] env[62627]: DEBUG nova.virt.hardware [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1158.276540] env[62627]: DEBUG nova.virt.hardware [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1158.276540] env[62627]: 
DEBUG nova.virt.hardware [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1158.276713] env[62627]: DEBUG nova.virt.hardware [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1158.276791] env[62627]: DEBUG nova.virt.hardware [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1158.276984] env[62627]: DEBUG nova.virt.hardware [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1158.285028] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c94b9a34-2f3e-46ea-a7ce-02173cf8b3f2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.302487] env[62627]: DEBUG oslo_vmware.api [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1158.302487] env[62627]: value = "task-2195089" [ 1158.302487] env[62627]: _type = "Task" [ 1158.302487] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1158.312181] env[62627]: DEBUG oslo_vmware.api [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195089, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.338446] env[62627]: DEBUG oslo_vmware.api [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2195088, 'name': PowerOnVM_Task, 'duration_secs': 0.42657} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1158.338753] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1158.338965] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-a0d0562c-cd65-43fc-b0a8-57d6f5e0f92f tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Updating instance 'abb14e3e-b521-4f21-bc94-6bbb66fbee57' progress to 100 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1158.444179] env[62627]: DEBUG oslo_vmware.api [None req-203f628b-0e72-4020-84e1-97edea347939 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195075, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.490648] env[62627]: INFO nova.compute.manager [-] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Took 1.52 seconds to deallocate network for instance. [ 1158.669571] env[62627]: DEBUG nova.compute.manager [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1158.728082] env[62627]: DEBUG oslo_vmware.api [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Task: {'id': task-2195087, 'name': PowerOnVM_Task, 'duration_secs': 0.538286} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1158.728396] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: 367a83b9-79f8-43b2-91dd-40d13193fa40] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1158.728624] env[62627]: INFO nova.compute.manager [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: 367a83b9-79f8-43b2-91dd-40d13193fa40] Took 9.84 seconds to spawn the instance on the hypervisor. 
[ 1158.728810] env[62627]: DEBUG nova.compute.manager [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: 367a83b9-79f8-43b2-91dd-40d13193fa40] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1158.729677] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c98394c3-e53b-44e2-b2de-ec50fa7fb082 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.813984] env[62627]: DEBUG oslo_vmware.api [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195089, 'name': ReconfigVM_Task, 'duration_secs': 0.230585} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1158.814395] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Updating instance '634d547b-3b5e-4185-8229-daf055ca6e69' progress to 33 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1158.926483] env[62627]: DEBUG oslo_vmware.rw_handles [None req-d0b2807c-3680-4f24-9c65-330d4dce8de2 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b0041a-95e7-b9be-3f7c-aa63c5753dfa/disk-0.vmdk. {{(pid=62627) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1158.927675] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-049a277a-4847-4963-b2f5-5849e20fd2dc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.935166] env[62627]: DEBUG oslo_vmware.rw_handles [None req-d0b2807c-3680-4f24-9c65-330d4dce8de2 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b0041a-95e7-b9be-3f7c-aa63c5753dfa/disk-0.vmdk is in state: ready. {{(pid=62627) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1158.935413] env[62627]: ERROR oslo_vmware.rw_handles [None req-d0b2807c-3680-4f24-9c65-330d4dce8de2 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b0041a-95e7-b9be-3f7c-aa63c5753dfa/disk-0.vmdk due to incomplete transfer. [ 1158.938617] env[62627]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-08c3016f-5aa8-4852-821b-0961aa00a7f2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.946263] env[62627]: DEBUG oslo_vmware.api [None req-203f628b-0e72-4020-84e1-97edea347939 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195075, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.947410] env[62627]: DEBUG oslo_vmware.rw_handles [None req-d0b2807c-3680-4f24-9c65-330d4dce8de2 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b0041a-95e7-b9be-3f7c-aa63c5753dfa/disk-0.vmdk. {{(pid=62627) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1158.947648] env[62627]: DEBUG nova.virt.vmwareapi.images [None req-d0b2807c-3680-4f24-9c65-330d4dce8de2 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Uploaded image 6b01e108-c175-4bfa-86d7-7807135ee79e to the Glance image server {{(pid=62627) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1158.950466] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0b2807c-3680-4f24-9c65-330d4dce8de2 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Destroying the VM {{(pid=62627) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1158.950739] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-0bb9c006-a196-40bf-84b2-e098c3a0c1e4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.958255] env[62627]: DEBUG oslo_vmware.api [None req-d0b2807c-3680-4f24-9c65-330d4dce8de2 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1158.958255] env[62627]: value = "task-2195090" [ 1158.958255] env[62627]: _type = "Task" [ 1158.958255] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1158.966498] env[62627]: DEBUG oslo_vmware.api [None req-d0b2807c-3680-4f24-9c65-330d4dce8de2 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195090, 'name': Destroy_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.998417] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c33b2cac-d408-4e3e-b98d-a8712c825232 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1158.998710] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c33b2cac-d408-4e3e-b98d-a8712c825232 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1158.998977] env[62627]: DEBUG nova.objects.instance [None req-c33b2cac-d408-4e3e-b98d-a8712c825232 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Lazy-loading 'resources' on Instance uuid 457e5aa6-d229-4f48-8817-e918ec4bd0f9 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1159.194695] env[62627]: DEBUG oslo_concurrency.lockutils [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1159.252034] env[62627]: INFO nova.compute.manager [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: 367a83b9-79f8-43b2-91dd-40d13193fa40] Took 16.79 seconds to build instance. 
[ 1159.321146] env[62627]: DEBUG nova.virt.hardware [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1159.321731] env[62627]: DEBUG nova.virt.hardware [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1159.321731] env[62627]: DEBUG nova.virt.hardware [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1159.321731] env[62627]: DEBUG nova.virt.hardware [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1159.321910] env[62627]: DEBUG nova.virt.hardware [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1159.322021] env[62627]: DEBUG nova.virt.hardware [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1159.322326] env[62627]: DEBUG nova.virt.hardware [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1159.322432] env[62627]: DEBUG nova.virt.hardware [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1159.322604] env[62627]: DEBUG nova.virt.hardware [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Got 1 possible topologies {{(pid=62627) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1159.322766] env[62627]: DEBUG nova.virt.hardware [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1159.322937] env[62627]: DEBUG nova.virt.hardware [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1159.328468] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Reconfiguring VM instance instance-0000005b to detach disk 2000 {{(pid=62627) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1159.328749] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-76397c2e-980f-4df9-821b-c9ed8d859428 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.353446] env[62627]: DEBUG oslo_vmware.api [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1159.353446] env[62627]: value = "task-2195091" [ 1159.353446] env[62627]: _type = "Task" [ 1159.353446] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.362985] env[62627]: DEBUG oslo_vmware.api [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195091, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.446860] env[62627]: DEBUG oslo_vmware.api [None req-203f628b-0e72-4020-84e1-97edea347939 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195075, 'name': ReconfigVM_Task, 'duration_secs': 5.887956} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1159.447145] env[62627]: DEBUG oslo_concurrency.lockutils [None req-203f628b-0e72-4020-84e1-97edea347939 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Releasing lock "84b8381c-bdd4-462e-aa98-0cc972f00882" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1159.448286] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-203f628b-0e72-4020-84e1-97edea347939 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Reconfigured VM to detach interface {{(pid=62627) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1159.467986] env[62627]: DEBUG oslo_vmware.api [None req-d0b2807c-3680-4f24-9c65-330d4dce8de2 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195090, 'name': Destroy_Task, 'duration_secs': 0.344456} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1159.468628] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-d0b2807c-3680-4f24-9c65-330d4dce8de2 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Destroyed the VM [ 1159.468875] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d0b2807c-3680-4f24-9c65-330d4dce8de2 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Deleting Snapshot of the VM instance {{(pid=62627) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1159.469179] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-3af64db4-1960-4b98-b91b-af364f2de3b5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.477337] env[62627]: DEBUG oslo_vmware.api [None req-d0b2807c-3680-4f24-9c65-330d4dce8de2 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1159.477337] env[62627]: value = "task-2195092" [ 1159.477337] env[62627]: _type = "Task" [ 1159.477337] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.486353] env[62627]: DEBUG oslo_vmware.api [None req-d0b2807c-3680-4f24-9c65-330d4dce8de2 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195092, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.707456] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d5cc444-c1d9-4985-850d-922185980b57 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.715533] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0f17e46-917c-4086-9cb7-8e33b1839cf5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.749997] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75da16ad-f2b9-47b8-b06b-e7b9ec57af53 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.753730] env[62627]: DEBUG nova.compute.manager [req-70b3910d-4b9b-48ae-9a03-4c0427ec25b1 req-8c62e46e-de38-41b1-95d2-422a523753d5 service nova] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Received event network-vif-deleted-b8c79070-6650-4427-ab59-c910dccfba54 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1159.753916] env[62627]: INFO nova.compute.manager [req-70b3910d-4b9b-48ae-9a03-4c0427ec25b1 req-8c62e46e-de38-41b1-95d2-422a523753d5 service nova] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Neutron deleted interface b8c79070-6650-4427-ab59-c910dccfba54; detaching it from the instance and deleting it from the info cache [ 1159.754206] env[62627]: DEBUG nova.network.neutron [req-70b3910d-4b9b-48ae-9a03-4c0427ec25b1 req-8c62e46e-de38-41b1-95d2-422a523753d5 service nova] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Updating instance_info_cache with network_info: [{"id": "4c3f3405-39a4-4c22-9c26-bdd0ab342c29", "address": "fa:16:3e:05:76:ef", "network": {"id": "a1c5c227-8f1f-4881-a6d8-f446b64d9846", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1052922661-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.243", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e4f7ad3ebcf4c4d97bdbce58eec5a09", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "023d6500-887e-4dc4-bec5-06b40450d9c0", "external-id": "nsx-vlan-transportzone-108", "segmentation_id": 108, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c3f3405-39", "ovs_interfaceid": "4c3f3405-39a4-4c22-9c26-bdd0ab342c29", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "aa0a7b01-9a61-4e32-82cb-8f849bff6266", "address": "fa:16:3e:fe:a1:e5", "network": {"id": "a1c5c227-8f1f-4881-a6d8-f446b64d9846", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1052922661-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, 
"floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e4f7ad3ebcf4c4d97bdbce58eec5a09", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "023d6500-887e-4dc4-bec5-06b40450d9c0", "external-id": "nsx-vlan-transportzone-108", "segmentation_id": 108, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa0a7b01-9a", "ovs_interfaceid": "aa0a7b01-9a61-4e32-82cb-8f849bff6266", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1159.758819] env[62627]: DEBUG oslo_concurrency.lockutils [None req-44ffe4b6-057e-4e06-9d5e-f733852b7981 tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Lock "367a83b9-79f8-43b2-91dd-40d13193fa40" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.315s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1159.766048] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a01439cb-c771-442f-a518-2d90818a5dce {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.782561] env[62627]: DEBUG nova.compute.provider_tree [None req-c33b2cac-d408-4e3e-b98d-a8712c825232 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Updating inventory in ProviderTree for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1159.875637] env[62627]: DEBUG oslo_vmware.api [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195091, 'name': ReconfigVM_Task, 'duration_secs': 0.295578} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1159.875919] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Reconfigured VM instance instance-0000005b to detach disk 2000 {{(pid=62627) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1159.876746] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e529b71-699d-4c6e-9b0a-0a3bb35386ba {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.900709] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Reconfiguring VM instance instance-0000005b to attach disk [datastore2] 634d547b-3b5e-4185-8229-daf055ca6e69/634d547b-3b5e-4185-8229-daf055ca6e69.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1159.902044] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-113b33b1-d52c-4e26-b632-0ac1f82f5dad {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.924570] env[62627]: DEBUG oslo_vmware.api [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1159.924570] env[62627]: value = "task-2195093" [ 1159.924570] env[62627]: _type = "Task" [ 1159.924570] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.935617] env[62627]: DEBUG oslo_vmware.api [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195093, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.988396] env[62627]: DEBUG oslo_vmware.api [None req-d0b2807c-3680-4f24-9c65-330d4dce8de2 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195092, 'name': RemoveSnapshot_Task} progress is 17%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.259687] env[62627]: DEBUG oslo_concurrency.lockutils [None req-203f628b-0e72-4020-84e1-97edea347939 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquiring lock "refresh_cache-84b8381c-bdd4-462e-aa98-0cc972f00882" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1160.260016] env[62627]: DEBUG oslo_concurrency.lockutils [None req-203f628b-0e72-4020-84e1-97edea347939 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquired lock "refresh_cache-84b8381c-bdd4-462e-aa98-0cc972f00882" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1160.260077] env[62627]: DEBUG nova.network.neutron [None req-203f628b-0e72-4020-84e1-97edea347939 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1160.261636] env[62627]: DEBUG oslo_concurrency.lockutils [req-70b3910d-4b9b-48ae-9a03-4c0427ec25b1 req-8c62e46e-de38-41b1-95d2-422a523753d5 service nova] Acquiring lock "84b8381c-bdd4-462e-aa98-0cc972f00882" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1160.261831] env[62627]: DEBUG oslo_concurrency.lockutils [req-70b3910d-4b9b-48ae-9a03-4c0427ec25b1 req-8c62e46e-de38-41b1-95d2-422a523753d5 service nova] Acquired lock "84b8381c-bdd4-462e-aa98-0cc972f00882" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1160.263312] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06c25fbe-f577-490e-8e70-7b56c250200f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.283637] env[62627]: DEBUG oslo_concurrency.lockutils [req-70b3910d-4b9b-48ae-9a03-4c0427ec25b1 req-8c62e46e-de38-41b1-95d2-422a523753d5 service nova] Releasing lock "84b8381c-bdd4-462e-aa98-0cc972f00882" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1160.283924] env[62627]: WARNING nova.compute.manager [req-70b3910d-4b9b-48ae-9a03-4c0427ec25b1 req-8c62e46e-de38-41b1-95d2-422a523753d5 service nova] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Detach interface failed, port_id=b8c79070-6650-4427-ab59-c910dccfba54, reason: No device with interface-id b8c79070-6650-4427-ab59-c910dccfba54 exists on VM: nova.exception.NotFound: No device with interface-id b8c79070-6650-4427-ab59-c910dccfba54 exists on VM [ 1160.307972] env[62627]: ERROR nova.scheduler.client.report [None req-c33b2cac-d408-4e3e-b98d-a8712c825232 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [req-5c9bda32-90e6-4de7-ac04-b8aa162030ff] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 
'allocation_ratio': 1.0}}] for resource provider with UUID c17e7f32-68d8-4e2f-8979-15d701de02f7. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-5c9bda32-90e6-4de7-ac04-b8aa162030ff"}]} [ 1160.326690] env[62627]: DEBUG nova.scheduler.client.report [None req-c33b2cac-d408-4e3e-b98d-a8712c825232 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Refreshing inventories for resource provider c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1160.344901] env[62627]: DEBUG nova.compute.manager [req-57257b7b-49f3-4a47-8eea-7b791cde4039 req-6670a441-f97d-4b51-bb7e-899f3bc24e0c service nova] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Received event network-vif-deleted-aa0a7b01-9a61-4e32-82cb-8f849bff6266 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1160.345220] env[62627]: INFO nova.compute.manager [req-57257b7b-49f3-4a47-8eea-7b791cde4039 req-6670a441-f97d-4b51-bb7e-899f3bc24e0c service nova] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Neutron deleted interface aa0a7b01-9a61-4e32-82cb-8f849bff6266; detaching it from the instance and deleting it from the info cache [ 1160.345630] env[62627]: DEBUG nova.network.neutron [req-57257b7b-49f3-4a47-8eea-7b791cde4039 req-6670a441-f97d-4b51-bb7e-899f3bc24e0c service nova] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Updating instance_info_cache with network_info: [{"id": "4c3f3405-39a4-4c22-9c26-bdd0ab342c29", "address": "fa:16:3e:05:76:ef", "network": {"id": "a1c5c227-8f1f-4881-a6d8-f446b64d9846", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1052922661-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.243", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e4f7ad3ebcf4c4d97bdbce58eec5a09", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "023d6500-887e-4dc4-bec5-06b40450d9c0", "external-id": "nsx-vlan-transportzone-108", "segmentation_id": 108, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c3f3405-39", "ovs_interfaceid": "4c3f3405-39a4-4c22-9c26-bdd0ab342c29", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1160.354352] env[62627]: DEBUG nova.scheduler.client.report [None req-c33b2cac-d408-4e3e-b98d-a8712c825232 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Updating ProviderTree inventory for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 
512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1160.354352] env[62627]: DEBUG nova.compute.provider_tree [None req-c33b2cac-d408-4e3e-b98d-a8712c825232 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Updating inventory in ProviderTree for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1160.366566] env[62627]: DEBUG nova.scheduler.client.report [None req-c33b2cac-d408-4e3e-b98d-a8712c825232 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Refreshing aggregate associations for resource provider c17e7f32-68d8-4e2f-8979-15d701de02f7, aggregates: None {{(pid=62627) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1160.389216] env[62627]: DEBUG nova.scheduler.client.report [None req-c33b2cac-d408-4e3e-b98d-a8712c825232 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Refreshing trait associations for resource provider c17e7f32-68d8-4e2f-8979-15d701de02f7, traits: COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62627) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1160.439753] env[62627]: DEBUG oslo_vmware.api [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195093, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.495823] env[62627]: DEBUG oslo_vmware.api [None req-d0b2807c-3680-4f24-9c65-330d4dce8de2 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195092, 'name': RemoveSnapshot_Task, 'duration_secs': 0.588914} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1160.497033] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d0b2807c-3680-4f24-9c65-330d4dce8de2 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Deleted Snapshot of the VM instance {{(pid=62627) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1160.497229] env[62627]: INFO nova.compute.manager [None req-d0b2807c-3680-4f24-9c65-330d4dce8de2 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Took 17.12 seconds to snapshot the instance on the hypervisor. 
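Note: the ERROR for req-5c9bda32 above is Placement rejecting an inventory PUT with 409 "placement.concurrent_update" because the resource-provider generation changed between the read and the write; the report client reacts by refreshing inventories, aggregates, and traits and then retrying, and the retry later succeeds, bumping the generation from 119 to 120. Below is a minimal sketch of that optimistic-concurrency loop against the Placement API; it assumes a pre-authenticated requests.Session carrying the appropriate OpenStack-API-Version header, and the endpoint URL is a placeholder.

    # Sketch of the generation-guarded inventory update with retry on 409.
    import requests

    PLACEMENT = 'http://placement.example/placement'  # placeholder endpoint

    def set_inventory(session: requests.Session, rp_uuid: str,
                      inventories: dict, max_retries: int = 3) -> int:
        url = f'{PLACEMENT}/resource_providers/{rp_uuid}/inventories'
        for _ in range(max_retries):
            current = session.get(url).json()
            generation = current['resource_provider_generation']
            resp = session.put(url, json={
                'resource_provider_generation': generation,
                'inventories': inventories,
            })
            if resp.status_code == 200:
                # Success: Placement bumps the generation (e.g. 119 -> 120).
                return resp.json()['resource_provider_generation']
            if resp.status_code == 409:
                # Another writer updated the provider first; re-read and retry.
                continue
            resp.raise_for_status()
        raise RuntimeError('gave up after repeated generation conflicts')

The generation acts as a compare-and-swap token: writers never block each other, but a stale writer is told to re-read, which is exactly the refresh/retry sequence visible between 1160.307972 and 1161.255881.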
[ 1160.579560] env[62627]: DEBUG oslo_concurrency.lockutils [None req-01508733-a48d-4853-b07d-ddc7d1054d51 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquiring lock "abb14e3e-b521-4f21-bc94-6bbb66fbee57" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1160.579560] env[62627]: DEBUG oslo_concurrency.lockutils [None req-01508733-a48d-4853-b07d-ddc7d1054d51 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "abb14e3e-b521-4f21-bc94-6bbb66fbee57" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1160.579560] env[62627]: DEBUG nova.compute.manager [None req-01508733-a48d-4853-b07d-ddc7d1054d51 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Going to confirm migration 3 {{(pid=62627) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1160.654016] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f036aae-e6ac-40a2-9835-a6a40af04c5b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.661535] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99b5ef91-b9dc-4a8f-a408-64e4dc665846 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.695755] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c599e581-8ba0-48b1-954f-0fa83dae337b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.704295] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1e638e4-cbc7-46bf-a0db-6faa48d34156 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.721045] env[62627]: DEBUG nova.compute.provider_tree [None req-c33b2cac-d408-4e3e-b98d-a8712c825232 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Updating inventory in ProviderTree for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1160.849251] env[62627]: DEBUG oslo_concurrency.lockutils [req-57257b7b-49f3-4a47-8eea-7b791cde4039 req-6670a441-f97d-4b51-bb7e-899f3bc24e0c service nova] Acquiring lock "84b8381c-bdd4-462e-aa98-0cc972f00882" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1160.849427] env[62627]: DEBUG oslo_concurrency.lockutils [req-57257b7b-49f3-4a47-8eea-7b791cde4039 
req-6670a441-f97d-4b51-bb7e-899f3bc24e0c service nova] Acquired lock "84b8381c-bdd4-462e-aa98-0cc972f00882" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1160.850258] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b0c7af3-6883-4462-a69e-4b6d32654d5f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.872634] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7104e86a-3a7b-4c15-aaa0-91bc372c81c8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.904318] env[62627]: DEBUG nova.virt.vmwareapi.vmops [req-57257b7b-49f3-4a47-8eea-7b791cde4039 req-6670a441-f97d-4b51-bb7e-899f3bc24e0c service nova] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Reconfiguring VM to detach interface {{(pid=62627) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1160.904849] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-472925f0-a864-4a2e-9280-4852c69c1131 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.921084] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e31d8386-5288-4d21-960c-f2c6c7f95ff4 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquiring lock "84b8381c-bdd4-462e-aa98-0cc972f00882" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1160.938809] env[62627]: DEBUG oslo_vmware.api [req-57257b7b-49f3-4a47-8eea-7b791cde4039 req-6670a441-f97d-4b51-bb7e-899f3bc24e0c service nova] Waiting for the task: (returnval){ [ 1160.938809] env[62627]: value = "task-2195094" [ 1160.938809] env[62627]: _type = "Task" [ 1160.938809] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1160.949806] env[62627]: DEBUG oslo_vmware.api [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195093, 'name': ReconfigVM_Task, 'duration_secs': 0.557521} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1160.950632] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Reconfigured VM instance instance-0000005b to attach disk [datastore2] 634d547b-3b5e-4185-8229-daf055ca6e69/634d547b-3b5e-4185-8229-daf055ca6e69.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1160.950909] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Updating instance '634d547b-3b5e-4185-8229-daf055ca6e69' progress to 50 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1160.957784] env[62627]: DEBUG oslo_vmware.api [req-57257b7b-49f3-4a47-8eea-7b791cde4039 req-6670a441-f97d-4b51-bb7e-899f3bc24e0c service nova] Task: {'id': task-2195094, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.058321] env[62627]: DEBUG nova.compute.manager [None req-d0b2807c-3680-4f24-9c65-330d4dce8de2 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Found 2 images (rotation: 2) {{(pid=62627) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 1161.110109] env[62627]: INFO nova.network.neutron [None req-203f628b-0e72-4020-84e1-97edea347939 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Port aa0a7b01-9a61-4e32-82cb-8f849bff6266 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
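Note: the repeated "Task: {'id': task-..., 'name': ReconfigVM_Task} progress is N%" lines come from oslo.vmware's task poller: the driver submits a vCenter task with session.invoke_api and then blocks in session.wait_for_task, which re-reads the task info on an interval until the task succeeds or fails. Below is a minimal sketch of that call pattern; the host, credentials, and managed-object reference are placeholders and the config spec is intentionally left empty.

    # Sketch of the submit-then-poll pattern behind the ReconfigVM_Task lines.
    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    session = vmware_api.VMwareAPISession(
        'vc1.example', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Reference to the VM to reconfigure and an (empty) reconfigure spec.
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')
    config_spec = session.vim.client.factory.create('ns0:VirtualMachineConfigSpec')

    # Kick off the reconfigure; vCenter returns a Task managed-object ref.
    task = session.invoke_api(session.vim, 'ReconfigVM_Task', vm_ref,
                              spec=config_spec)

    # wait_for_task polls the task every task_poll_interval seconds, logging
    # progress as it goes, and raises if the task ends in an error state.
    session.wait_for_task(task)

The disk detach/attach reconfigures and the interface detach above all follow this shape: build a spec, invoke ReconfigVM_Task, then poll until vCenter reports completion.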
[ 1161.110109] env[62627]: DEBUG nova.network.neutron [None req-203f628b-0e72-4020-84e1-97edea347939 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Updating instance_info_cache with network_info: [{"id": "4c3f3405-39a4-4c22-9c26-bdd0ab342c29", "address": "fa:16:3e:05:76:ef", "network": {"id": "a1c5c227-8f1f-4881-a6d8-f446b64d9846", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1052922661-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.243", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e4f7ad3ebcf4c4d97bdbce58eec5a09", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "023d6500-887e-4dc4-bec5-06b40450d9c0", "external-id": "nsx-vlan-transportzone-108", "segmentation_id": 108, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c3f3405-39", "ovs_interfaceid": "4c3f3405-39a4-4c22-9c26-bdd0ab342c29", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1161.161753] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1161.161753] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1161.163947] env[62627]: DEBUG oslo_concurrency.lockutils [None req-01508733-a48d-4853-b07d-ddc7d1054d51 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquiring lock "refresh_cache-abb14e3e-b521-4f21-bc94-6bbb66fbee57" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1161.164140] env[62627]: DEBUG oslo_concurrency.lockutils [None req-01508733-a48d-4853-b07d-ddc7d1054d51 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquired lock "refresh_cache-abb14e3e-b521-4f21-bc94-6bbb66fbee57" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1161.164315] env[62627]: DEBUG nova.network.neutron [None req-01508733-a48d-4853-b07d-ddc7d1054d51 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1161.164496] env[62627]: DEBUG nova.objects.instance [None req-01508733-a48d-4853-b07d-ddc7d1054d51 tempest-DeleteServersTestJSON-1791757005 
tempest-DeleteServersTestJSON-1791757005-project-member] Lazy-loading 'info_cache' on Instance uuid abb14e3e-b521-4f21-bc94-6bbb66fbee57 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1161.255881] env[62627]: DEBUG nova.scheduler.client.report [None req-c33b2cac-d408-4e3e-b98d-a8712c825232 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Updated inventory for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with generation 119 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1161.256302] env[62627]: DEBUG nova.compute.provider_tree [None req-c33b2cac-d408-4e3e-b98d-a8712c825232 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Updating resource provider c17e7f32-68d8-4e2f-8979-15d701de02f7 generation from 119 to 120 during operation: update_inventory {{(pid=62627) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1161.256483] env[62627]: DEBUG nova.compute.provider_tree [None req-c33b2cac-d408-4e3e-b98d-a8712c825232 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Updating inventory in ProviderTree for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1161.450167] env[62627]: DEBUG oslo_vmware.api [req-57257b7b-49f3-4a47-8eea-7b791cde4039 req-6670a441-f97d-4b51-bb7e-899f3bc24e0c service nova] Task: {'id': task-2195094, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.460921] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1504fea5-5c08-4cea-a3b3-b8f17c98c856 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.480172] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd22fa39-cdc9-4bf9-840c-3e8e68802dab {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.497846] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Updating instance '634d547b-3b5e-4185-8229-daf055ca6e69' progress to 67 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1161.617135] env[62627]: DEBUG oslo_concurrency.lockutils [None req-203f628b-0e72-4020-84e1-97edea347939 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Releasing lock "refresh_cache-84b8381c-bdd4-462e-aa98-0cc972f00882" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1161.673434] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1161.673434] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Starting heal instance info cache {{(pid=62627) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 1161.673434] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Rebuilding the list of instances to heal {{(pid=62627) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10449}} [ 1161.761066] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c33b2cac-d408-4e3e-b98d-a8712c825232 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.762s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1161.763397] env[62627]: DEBUG oslo_concurrency.lockutils [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.569s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1161.764889] env[62627]: INFO nova.compute.claims [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1161.783147] env[62627]: INFO nova.scheduler.client.report [None req-c33b2cac-d408-4e3e-b98d-a8712c825232 
tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Deleted allocations for instance 457e5aa6-d229-4f48-8817-e918ec4bd0f9 [ 1161.896042] env[62627]: DEBUG nova.compute.manager [None req-894b7972-03f7-44d8-a097-d38b3798344b tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1161.896401] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46befb13-f97b-4544-82ae-32469832eb74 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.950830] env[62627]: DEBUG oslo_vmware.api [req-57257b7b-49f3-4a47-8eea-7b791cde4039 req-6670a441-f97d-4b51-bb7e-899f3bc24e0c service nova] Task: {'id': task-2195094, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.066894] env[62627]: DEBUG nova.network.neutron [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Port 9830a1ce-942e-4954-8513-f624e13d304c binding to destination host cpu-1 is already ACTIVE {{(pid=62627) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1162.120870] env[62627]: DEBUG oslo_concurrency.lockutils [None req-203f628b-0e72-4020-84e1-97edea347939 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lock "interface-84b8381c-bdd4-462e-aa98-0cc972f00882-b8c79070-6650-4427-ab59-c910dccfba54" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.281s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1162.179669] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Skipping network cache update for instance because it is being deleted. {{(pid=62627) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10462}} [ 1162.179855] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Skipping network cache update for instance because it is being deleted. 
{{(pid=62627) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10462}} [ 1162.217307] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Acquiring lock "refresh_cache-241311b0-ba81-41d1-a9b6-cd40d04dbe3c" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1162.217451] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Acquired lock "refresh_cache-241311b0-ba81-41d1-a9b6-cd40d04dbe3c" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1162.217832] env[62627]: DEBUG nova.network.neutron [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Forcefully refreshing network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1162.217832] env[62627]: DEBUG nova.objects.instance [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lazy-loading 'info_cache' on Instance uuid 241311b0-ba81-41d1-a9b6-cd40d04dbe3c {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1162.290932] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c33b2cac-d408-4e3e-b98d-a8712c825232 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Lock "457e5aa6-d229-4f48-8817-e918ec4bd0f9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.962s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1162.407890] env[62627]: INFO nova.compute.manager [None req-894b7972-03f7-44d8-a097-d38b3798344b tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] instance snapshotting [ 1162.408609] env[62627]: DEBUG nova.objects.instance [None req-894b7972-03f7-44d8-a097-d38b3798344b tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lazy-loading 'flavor' on Instance uuid d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1162.451501] env[62627]: DEBUG oslo_vmware.api [req-57257b7b-49f3-4a47-8eea-7b791cde4039 req-6670a441-f97d-4b51-bb7e-899f3bc24e0c service nova] Task: {'id': task-2195094, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.546452] env[62627]: DEBUG nova.compute.manager [req-3d7f9620-b0b3-4f2f-8366-d0a9448d8125 req-2c9558ba-618c-4c7a-a79a-9a430f47f8ec service nova] [instance: 367a83b9-79f8-43b2-91dd-40d13193fa40] Received event network-changed-4534e6a9-471a-4703-aa7a-7aac3d20292f {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1162.546650] env[62627]: DEBUG nova.compute.manager [req-3d7f9620-b0b3-4f2f-8366-d0a9448d8125 req-2c9558ba-618c-4c7a-a79a-9a430f47f8ec service nova] [instance: 367a83b9-79f8-43b2-91dd-40d13193fa40] Refreshing instance network info cache due to event network-changed-4534e6a9-471a-4703-aa7a-7aac3d20292f. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1162.546862] env[62627]: DEBUG oslo_concurrency.lockutils [req-3d7f9620-b0b3-4f2f-8366-d0a9448d8125 req-2c9558ba-618c-4c7a-a79a-9a430f47f8ec service nova] Acquiring lock "refresh_cache-367a83b9-79f8-43b2-91dd-40d13193fa40" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1162.547036] env[62627]: DEBUG oslo_concurrency.lockutils [req-3d7f9620-b0b3-4f2f-8366-d0a9448d8125 req-2c9558ba-618c-4c7a-a79a-9a430f47f8ec service nova] Acquired lock "refresh_cache-367a83b9-79f8-43b2-91dd-40d13193fa40" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1162.547216] env[62627]: DEBUG nova.network.neutron [req-3d7f9620-b0b3-4f2f-8366-d0a9448d8125 req-2c9558ba-618c-4c7a-a79a-9a430f47f8ec service nova] [instance: 367a83b9-79f8-43b2-91dd-40d13193fa40] Refreshing network info cache for port 4534e6a9-471a-4703-aa7a-7aac3d20292f {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1162.704759] env[62627]: DEBUG nova.network.neutron [None req-01508733-a48d-4853-b07d-ddc7d1054d51 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Updating instance_info_cache with network_info: [{"id": "9297477b-3156-43b7-b7ca-4c2d76609f24", "address": "fa:16:3e:93:95:7a", "network": {"id": "7c6d5a35-da58-4a44-83d0-20feb994e5ab", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-709440062-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6c9263b20f714274a55b88a8bdbd5251", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "99be9a5e-b3f9-4e6c-83d5-df11f817847d", "external-id": "nsx-vlan-transportzone-566", "segmentation_id": 566, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9297477b-31", "ovs_interfaceid": "9297477b-3156-43b7-b7ca-4c2d76609f24", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1162.914070] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18305519-9960-4334-9e62-a649f744751c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.940804] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81659a9d-4776-4b69-8308-5e684ab38b46 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.951801] env[62627]: DEBUG oslo_vmware.api [req-57257b7b-49f3-4a47-8eea-7b791cde4039 req-6670a441-f97d-4b51-bb7e-899f3bc24e0c service nova] Task: {'id': task-2195094, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.978475] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-073d781e-6265-4fdb-b7ce-d38c67e71da4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.986374] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27f2a852-4d80-49ed-b032-222a299f8af3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.020462] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69c74232-fa26-4b97-9b47-f8eb9ef9925e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.028593] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da1e7e43-6a9b-4abd-9e70-29542ae88fce {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.042035] env[62627]: DEBUG nova.compute.provider_tree [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1163.091493] env[62627]: DEBUG oslo_concurrency.lockutils [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquiring lock "634d547b-3b5e-4185-8229-daf055ca6e69-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1163.091717] env[62627]: DEBUG oslo_concurrency.lockutils [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "634d547b-3b5e-4185-8229-daf055ca6e69-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1163.091889] env[62627]: DEBUG oslo_concurrency.lockutils [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "634d547b-3b5e-4185-8229-daf055ca6e69-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1163.210499] env[62627]: DEBUG oslo_concurrency.lockutils [None req-01508733-a48d-4853-b07d-ddc7d1054d51 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Releasing lock "refresh_cache-abb14e3e-b521-4f21-bc94-6bbb66fbee57" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1163.210763] env[62627]: DEBUG nova.objects.instance [None req-01508733-a48d-4853-b07d-ddc7d1054d51 tempest-DeleteServersTestJSON-1791757005 
tempest-DeleteServersTestJSON-1791757005-project-member] Lazy-loading 'migration_context' on Instance uuid abb14e3e-b521-4f21-bc94-6bbb66fbee57 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1163.296681] env[62627]: DEBUG nova.network.neutron [req-3d7f9620-b0b3-4f2f-8366-d0a9448d8125 req-2c9558ba-618c-4c7a-a79a-9a430f47f8ec service nova] [instance: 367a83b9-79f8-43b2-91dd-40d13193fa40] Updated VIF entry in instance network info cache for port 4534e6a9-471a-4703-aa7a-7aac3d20292f. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1163.297021] env[62627]: DEBUG nova.network.neutron [req-3d7f9620-b0b3-4f2f-8366-d0a9448d8125 req-2c9558ba-618c-4c7a-a79a-9a430f47f8ec service nova] [instance: 367a83b9-79f8-43b2-91dd-40d13193fa40] Updating instance_info_cache with network_info: [{"id": "4534e6a9-471a-4703-aa7a-7aac3d20292f", "address": "fa:16:3e:55:aa:31", "network": {"id": "e4275dc3-df4b-49c1-9677-3c0fc9408d2e", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1014582117-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.187", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "711f8ab1a2d5466399f408870e412587", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e83154-c0d2-4d3d-b95e-3cd5ba336257", "external-id": "nsx-vlan-transportzone-771", "segmentation_id": 771, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4534e6a9-47", "ovs_interfaceid": "4534e6a9-471a-4703-aa7a-7aac3d20292f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1163.455832] env[62627]: DEBUG oslo_vmware.api [req-57257b7b-49f3-4a47-8eea-7b791cde4039 req-6670a441-f97d-4b51-bb7e-899f3bc24e0c service nova] Task: {'id': task-2195094, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.459924] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-894b7972-03f7-44d8-a097-d38b3798344b tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Creating Snapshot of the VM instance {{(pid=62627) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1163.460257] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-2593ad4b-06f2-40fe-9979-013639cfd7dd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.467469] env[62627]: DEBUG oslo_vmware.api [None req-894b7972-03f7-44d8-a097-d38b3798344b tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1163.467469] env[62627]: value = "task-2195096" [ 1163.467469] env[62627]: _type = "Task" [ 1163.467469] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.476016] env[62627]: DEBUG oslo_vmware.api [None req-894b7972-03f7-44d8-a097-d38b3798344b tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195096, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.545472] env[62627]: DEBUG nova.scheduler.client.report [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1163.714107] env[62627]: DEBUG nova.objects.base [None req-01508733-a48d-4853-b07d-ddc7d1054d51 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=62627) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1163.715122] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7eeaa271-64bc-40c6-bf6c-33dc35de78d7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.735402] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d418cedd-1b4c-4ccc-84f2-97221a343ba3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.741992] env[62627]: DEBUG oslo_vmware.api [None req-01508733-a48d-4853-b07d-ddc7d1054d51 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 1163.741992] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52896d64-76db-396d-9645-ec19dac9eb1f" [ 1163.741992] env[62627]: _type = "Task" [ 1163.741992] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.751813] env[62627]: DEBUG oslo_vmware.api [None req-01508733-a48d-4853-b07d-ddc7d1054d51 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52896d64-76db-396d-9645-ec19dac9eb1f, 'name': SearchDatastore_Task, 'duration_secs': 0.00715} completed successfully. 
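The inventory dict reported to placement carries total, reserved, allocation_ratio, min/max unit and step size per resource class. Placement's usable capacity is conventionally (total - reserved) * allocation_ratio; a quick check against the figures logged above (a sketch, not deployment code):

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0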
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.751813] env[62627]: DEBUG oslo_concurrency.lockutils [None req-01508733-a48d-4853-b07d-ddc7d1054d51 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1163.799532] env[62627]: DEBUG oslo_concurrency.lockutils [req-3d7f9620-b0b3-4f2f-8366-d0a9448d8125 req-2c9558ba-618c-4c7a-a79a-9a430f47f8ec service nova] Releasing lock "refresh_cache-367a83b9-79f8-43b2-91dd-40d13193fa40" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1163.955897] env[62627]: DEBUG oslo_vmware.api [req-57257b7b-49f3-4a47-8eea-7b791cde4039 req-6670a441-f97d-4b51-bb7e-899f3bc24e0c service nova] Task: {'id': task-2195094, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.958343] env[62627]: DEBUG nova.network.neutron [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Updating instance_info_cache with network_info: [{"id": "ddd3e21b-1460-4da2-bfcc-258303d2719e", "address": "fa:16:3e:e3:64:4d", "network": {"id": "0b8ce5a6-9abc-4473-b558-a4e9e5c7d741", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1874234589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d41b3fa69e2473fbea79b0ec52108fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc9714ff-7109-4ea1-9435-b2b3fbdb9e81", "external-id": "nsx-vlan-transportzone-887", "segmentation_id": 887, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapddd3e21b-14", "ovs_interfaceid": "ddd3e21b-1460-4da2-bfcc-258303d2719e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1163.977335] env[62627]: DEBUG oslo_vmware.api [None req-894b7972-03f7-44d8-a097-d38b3798344b tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195096, 'name': CreateSnapshot_Task, 'duration_secs': 0.44511} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.977576] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-894b7972-03f7-44d8-a097-d38b3798344b tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Created Snapshot of the VM instance {{(pid=62627) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1163.978320] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68300ae2-aa0e-413f-b25a-9a304bd1b1a0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.051168] env[62627]: DEBUG oslo_concurrency.lockutils [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.288s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1164.051564] env[62627]: DEBUG nova.compute.manager [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1164.054243] env[62627]: DEBUG oslo_concurrency.lockutils [None req-01508733-a48d-4853-b07d-ddc7d1054d51 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.303s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1164.126844] env[62627]: DEBUG oslo_concurrency.lockutils [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquiring lock "refresh_cache-634d547b-3b5e-4185-8229-daf055ca6e69" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1164.127046] env[62627]: DEBUG oslo_concurrency.lockutils [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquired lock "refresh_cache-634d547b-3b5e-4185-8229-daf055ca6e69" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1164.127266] env[62627]: DEBUG nova.network.neutron [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1164.455361] env[62627]: DEBUG oslo_vmware.api [req-57257b7b-49f3-4a47-8eea-7b791cde4039 req-6670a441-f97d-4b51-bb7e-899f3bc24e0c service nova] Task: {'id': task-2195094, 'name': ReconfigVM_Task} progress is 14%. 
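The CreateSnapshot_Task above follows the submit-then-poll pattern: the driver invokes the vCenter method, receives a task handle, and wait_for_task polls its progress until completion (here 0% at the first poll, done after roughly 0.45 s). A generic polling sketch with a caller-supplied poll function; it illustrates the pattern rather than the oslo.vmware implementation:

    import time

    def wait_for_task(poll_task, interval=0.5, timeout=300):
        """Poll a vCenter-style task until it reports completion."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            state = poll_task()            # e.g. {'progress': 94, 'done': False}
            if state.get('done'):
                return state
            time.sleep(interval)
        raise TimeoutError('task did not complete within %ss' % timeout)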
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.460970] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Releasing lock "refresh_cache-241311b0-ba81-41d1-a9b6-cd40d04dbe3c" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1164.461317] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Updated the network info_cache for instance {{(pid=62627) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10516}} [ 1164.461360] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1164.461495] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1164.461635] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1164.461778] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1164.461916] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1164.462069] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1164.462198] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62627) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 1164.462339] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1164.495138] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-894b7972-03f7-44d8-a097-d38b3798344b tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Creating linked-clone VM from snapshot {{(pid=62627) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1164.495440] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-5f963337-2251-4a74-8833-09a02cd5fd06 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.505885] env[62627]: DEBUG oslo_vmware.api [None req-894b7972-03f7-44d8-a097-d38b3798344b tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1164.505885] env[62627]: value = "task-2195097" [ 1164.505885] env[62627]: _type = "Task" [ 1164.505885] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1164.513940] env[62627]: DEBUG oslo_vmware.api [None req-894b7972-03f7-44d8-a097-d38b3798344b tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195097, 'name': CloneVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.560242] env[62627]: DEBUG nova.compute.utils [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1164.561976] env[62627]: DEBUG nova.compute.manager [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Allocating IP information in the background. 
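The block of "Running periodic task ComputeManager._poll_*" records above is the compute manager's periodic-task loop firing its registered housekeeping methods in one pass. A hedged sketch of how such tasks are typically declared with oslo.service (the decorator and base-class names are the commonly documented oslo.service API, assumed here rather than taken from this log):

    from oslo_config import cfg
    from oslo_service import periodic_task

    CONF = cfg.CONF

    class Manager(periodic_task.PeriodicTasks):
        def __init__(self):
            super().__init__(CONF)

        @periodic_task.periodic_task(spacing=60)
        def _poll_unconfirmed_resizes(self, context):
            pass  # confirm or revert resizes that were never acknowledged

        @periodic_task.periodic_task(spacing=60)
        def _poll_volume_usage(self, context):
            pass  # gather block-device usage counters

    # The service loop then calls manager.run_periodic_tasks(context) on a
    # timer, producing "Running periodic task ..." lines like the ones above.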
{{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1164.562597] env[62627]: DEBUG nova.network.neutron [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1164.613391] env[62627]: DEBUG nova.policy [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '11809f17aed344258d9eee68421e1df3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9b8ab3c165b34e418807f39bc01abf16', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 1164.778478] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccb94a02-9c81-45f0-9196-9e9e94d0cb0a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.787946] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58bad32b-03f1-4d99-80a1-22d9edc03f0c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.830679] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f98b98b-ef89-4990-80f2-6082bbed6903 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.844344] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73db6e22-f9f3-4505-968d-4c0110f3454a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.860980] env[62627]: DEBUG nova.compute.provider_tree [None req-01508733-a48d-4853-b07d-ddc7d1054d51 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1164.951374] env[62627]: DEBUG nova.network.neutron [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Successfully created port: 438ff9e4-843e-4e29-b42f-fd893752d160 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1164.954425] env[62627]: DEBUG nova.network.neutron [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Updating instance_info_cache with network_info: [{"id": "9830a1ce-942e-4954-8513-f624e13d304c", "address": "fa:16:3e:bb:02:dc", "network": {"id": "f5de376b-cb15-4145-90b2-664ca4fa638e", "bridge": "br-int", "label": 
"tempest-ServerDiskConfigTestJSON-1451402594-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74aed70194cf444884768a5c89fe8613", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "098df9b7-d759-47f7-b756-334848cb423b", "external-id": "nsx-vlan-transportzone-765", "segmentation_id": 765, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9830a1ce-94", "ovs_interfaceid": "9830a1ce-942e-4954-8513-f624e13d304c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1164.962949] env[62627]: DEBUG oslo_vmware.api [req-57257b7b-49f3-4a47-8eea-7b791cde4039 req-6670a441-f97d-4b51-bb7e-899f3bc24e0c service nova] Task: {'id': task-2195094, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.965994] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1165.017886] env[62627]: DEBUG oslo_vmware.api [None req-894b7972-03f7-44d8-a097-d38b3798344b tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195097, 'name': CloneVM_Task} progress is 94%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.065697] env[62627]: DEBUG nova.compute.manager [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Start building block device mappings for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1165.367855] env[62627]: DEBUG nova.scheduler.client.report [None req-01508733-a48d-4853-b07d-ddc7d1054d51 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1165.458357] env[62627]: DEBUG oslo_concurrency.lockutils [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Releasing lock "refresh_cache-634d547b-3b5e-4185-8229-daf055ca6e69" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1165.462009] env[62627]: DEBUG oslo_vmware.api [req-57257b7b-49f3-4a47-8eea-7b791cde4039 req-6670a441-f97d-4b51-bb7e-899f3bc24e0c service nova] Task: {'id': task-2195094, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.517368] env[62627]: DEBUG oslo_vmware.api [None req-894b7972-03f7-44d8-a097-d38b3798344b tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195097, 'name': CloneVM_Task} progress is 100%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.571393] env[62627]: INFO nova.virt.block_device [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Booting with volume 5211d219-5b96-4822-bdba-9d41bc2363b9 at /dev/sda [ 1165.608041] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-57b0461e-7082-462f-b91c-8c34c5238b01 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.619321] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a45da78-526a-4393-8fb0-1497efd87fd4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.653477] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bed91384-03dd-4577-91bf-fb069b760ccc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.662480] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b46cfe46-0988-4e11-ba35-7ad05effc857 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.698534] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4aaf0004-5f74-4765-b0d8-ca866da03252 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.708482] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbb09fa7-8ec2-42da-b431-b48aaf7a656c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.720475] env[62627]: DEBUG nova.virt.block_device [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Updating existing volume attachment record: 4e9ab5e6-d668-4d08-8c6d-a29f874549e1 {{(pid=62627) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1165.958014] env[62627]: DEBUG oslo_vmware.api [req-57257b7b-49f3-4a47-8eea-7b791cde4039 req-6670a441-f97d-4b51-bb7e-899f3bc24e0c service nova] Task: {'id': task-2195094, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.983309] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-410e4fc1-228f-4dbc-99f0-50b2acfaddce {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.003815] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9c9e0d3-e51a-49be-8030-1fa36a423bfb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.013948] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Updating instance '634d547b-3b5e-4185-8229-daf055ca6e69' progress to 83 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1166.023477] env[62627]: DEBUG oslo_vmware.api [None req-894b7972-03f7-44d8-a097-d38b3798344b tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195097, 'name': CloneVM_Task, 'duration_secs': 1.073594} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.023771] env[62627]: INFO nova.virt.vmwareapi.vmops [None req-894b7972-03f7-44d8-a097-d38b3798344b tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Created linked-clone VM from snapshot [ 1166.024532] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68fac060-ae07-4443-b62a-1ef1f8f02965 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.032400] env[62627]: DEBUG nova.virt.vmwareapi.images [None req-894b7972-03f7-44d8-a097-d38b3798344b tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Uploading image 17aa2842-af5e-4093-b978-951f09730de0 {{(pid=62627) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1166.060612] env[62627]: DEBUG oslo_vmware.rw_handles [None req-894b7972-03f7-44d8-a097-d38b3798344b tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1166.060612] env[62627]: value = "vm-447795" [ 1166.060612] env[62627]: _type = "VirtualMachine" [ 1166.060612] env[62627]: }. 
{{(pid=62627) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1166.060901] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-34540d2c-7fec-4b98-a865-fb37c95ebf84 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.068967] env[62627]: DEBUG oslo_vmware.rw_handles [None req-894b7972-03f7-44d8-a097-d38b3798344b tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lease: (returnval){ [ 1166.068967] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52dea3fc-39b0-650f-45f2-d87e3cd92b09" [ 1166.068967] env[62627]: _type = "HttpNfcLease" [ 1166.068967] env[62627]: } obtained for exporting VM: (result){ [ 1166.068967] env[62627]: value = "vm-447795" [ 1166.068967] env[62627]: _type = "VirtualMachine" [ 1166.068967] env[62627]: }. {{(pid=62627) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1166.069304] env[62627]: DEBUG oslo_vmware.api [None req-894b7972-03f7-44d8-a097-d38b3798344b tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the lease: (returnval){ [ 1166.069304] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52dea3fc-39b0-650f-45f2-d87e3cd92b09" [ 1166.069304] env[62627]: _type = "HttpNfcLease" [ 1166.069304] env[62627]: } to be ready. {{(pid=62627) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1166.076446] env[62627]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1166.076446] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52dea3fc-39b0-650f-45f2-d87e3cd92b09" [ 1166.076446] env[62627]: _type = "HttpNfcLease" [ 1166.076446] env[62627]: } is initializing. 
{{(pid=62627) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1166.381677] env[62627]: DEBUG oslo_concurrency.lockutils [None req-01508733-a48d-4853-b07d-ddc7d1054d51 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.327s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1166.386199] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 1.420s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1166.386199] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1166.386199] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62627) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1166.386199] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36f1e24f-fe4b-4a09-9c3c-7a3200545824 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.395787] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b6ff893-a275-4787-be97-10fcc8dfa6dd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.412289] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abb6c237-0aaa-4585-b52d-377375583b6d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.420365] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f622184a-c8e6-40aa-aad4-23e5f606a34b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.452371] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178928MB free_disk=177GB free_vcpus=48 pci_devices=None {{(pid=62627) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1166.452371] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1166.452575] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1166.465374] env[62627]: DEBUG oslo_vmware.api [req-57257b7b-49f3-4a47-8eea-7b791cde4039 req-6670a441-f97d-4b51-bb7e-899f3bc24e0c service nova] Task: {'id': task-2195094, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.521152] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1166.521152] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-19f03c8b-bb1e-4584-9435-c58f1f34cb33 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.529741] env[62627]: DEBUG oslo_vmware.api [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1166.529741] env[62627]: value = "task-2195099" [ 1166.529741] env[62627]: _type = "Task" [ 1166.529741] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1166.535811] env[62627]: DEBUG nova.network.neutron [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Successfully updated port: 438ff9e4-843e-4e29-b42f-fd893752d160 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1166.539949] env[62627]: DEBUG oslo_vmware.api [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195099, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.542339] env[62627]: DEBUG nova.compute.manager [req-b42423a1-011a-4911-8e6d-d4b0f6dd0173 req-b060fb5f-3c79-4368-bb38-ab9ddda0927e service nova] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Received event network-vif-plugged-438ff9e4-843e-4e29-b42f-fd893752d160 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1166.542550] env[62627]: DEBUG oslo_concurrency.lockutils [req-b42423a1-011a-4911-8e6d-d4b0f6dd0173 req-b060fb5f-3c79-4368-bb38-ab9ddda0927e service nova] Acquiring lock "c25fe185-2d6a-40a8-be06-84ee22a5464e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1166.542745] env[62627]: DEBUG oslo_concurrency.lockutils [req-b42423a1-011a-4911-8e6d-d4b0f6dd0173 req-b060fb5f-3c79-4368-bb38-ab9ddda0927e service nova] Lock "c25fe185-2d6a-40a8-be06-84ee22a5464e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1166.543048] env[62627]: DEBUG oslo_concurrency.lockutils [req-b42423a1-011a-4911-8e6d-d4b0f6dd0173 req-b060fb5f-3c79-4368-bb38-ab9ddda0927e service nova] Lock "c25fe185-2d6a-40a8-be06-84ee22a5464e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1166.543093] env[62627]: DEBUG nova.compute.manager [req-b42423a1-011a-4911-8e6d-d4b0f6dd0173 req-b060fb5f-3c79-4368-bb38-ab9ddda0927e service nova] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] No waiting events found dispatching network-vif-plugged-438ff9e4-843e-4e29-b42f-fd893752d160 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1166.543235] env[62627]: WARNING nova.compute.manager [req-b42423a1-011a-4911-8e6d-d4b0f6dd0173 req-b060fb5f-3c79-4368-bb38-ab9ddda0927e service nova] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Received unexpected event network-vif-plugged-438ff9e4-843e-4e29-b42f-fd893752d160 for instance with vm_state building and task_state block_device_mapping. [ 1166.577117] env[62627]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1166.577117] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52dea3fc-39b0-650f-45f2-d87e3cd92b09" [ 1166.577117] env[62627]: _type = "HttpNfcLease" [ 1166.577117] env[62627]: } is ready. {{(pid=62627) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1166.577504] env[62627]: DEBUG oslo_vmware.rw_handles [None req-894b7972-03f7-44d8-a097-d38b3798344b tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1166.577504] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52dea3fc-39b0-650f-45f2-d87e3cd92b09" [ 1166.577504] env[62627]: _type = "HttpNfcLease" [ 1166.577504] env[62627]: }. 
{{(pid=62627) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1166.578363] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d39274a9-53c8-4367-affa-d793a64e1a3e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.587049] env[62627]: DEBUG oslo_vmware.rw_handles [None req-894b7972-03f7-44d8-a097-d38b3798344b tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5263e269-e733-6e53-324b-6fb02b7e543e/disk-0.vmdk from lease info. {{(pid=62627) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1166.587326] env[62627]: DEBUG oslo_vmware.rw_handles [None req-894b7972-03f7-44d8-a097-d38b3798344b tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5263e269-e733-6e53-324b-6fb02b7e543e/disk-0.vmdk for reading. {{(pid=62627) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1166.676746] env[62627]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-37486746-6056-4dba-8405-81706a853524 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.951451] env[62627]: INFO nova.scheduler.client.report [None req-01508733-a48d-4853-b07d-ddc7d1054d51 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Deleted allocation for migration eb241675-c37b-49bb-9c2a-1c17acedf01d [ 1166.973750] env[62627]: DEBUG oslo_vmware.api [req-57257b7b-49f3-4a47-8eea-7b791cde4039 req-6670a441-f97d-4b51-bb7e-899f3bc24e0c service nova] Task: {'id': task-2195094, 'name': ReconfigVM_Task, 'duration_secs': 5.838374} completed successfully. 
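The snapshot upload above works through an HttpNfcLease: ExportVm creates the lease, the driver waits for it to leave the "initializing" state, reads the VMDK over the URL the lease exposes, and periodically calls HttpNfcLeaseProgress so the lease stays alive for the duration of the transfer. A schematic of that sequence with the vCenter interactions injected as callables (an illustration of the flow seen in the log, not the oslo.vmware code):

    import time

    def export_vmdk(create_lease, lease_state, find_vmdk_url, read_chunks, report_progress):
        """Create an export lease, wait until ready, stream the disk, keep the lease alive."""
        lease = create_lease()                      # VirtualMachine.ExportVm
        while lease_state(lease) != 'ready':        # lease starts out 'initializing'
            time.sleep(0.5)
        url = find_vmdk_url(lease)                  # e.g. .../nfc/<lease-id>/disk-0.vmdk
        transferred = 0
        for chunk in read_chunks(url):
            transferred += len(chunk)
            report_progress(lease, transferred)     # HttpNfcLease.HttpNfcLeaseProgress
        return transferred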
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.973995] env[62627]: DEBUG oslo_concurrency.lockutils [req-57257b7b-49f3-4a47-8eea-7b791cde4039 req-6670a441-f97d-4b51-bb7e-899f3bc24e0c service nova] Releasing lock "84b8381c-bdd4-462e-aa98-0cc972f00882" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1166.974292] env[62627]: DEBUG nova.virt.vmwareapi.vmops [req-57257b7b-49f3-4a47-8eea-7b791cde4039 req-6670a441-f97d-4b51-bb7e-899f3bc24e0c service nova] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Reconfigured VM to detach interface {{(pid=62627) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1166.974946] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e31d8386-5288-4d21-960c-f2c6c7f95ff4 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lock "84b8381c-bdd4-462e-aa98-0cc972f00882" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 6.054s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1166.975347] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e31d8386-5288-4d21-960c-f2c6c7f95ff4 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquiring lock "84b8381c-bdd4-462e-aa98-0cc972f00882-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1166.976858] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e31d8386-5288-4d21-960c-f2c6c7f95ff4 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lock "84b8381c-bdd4-462e-aa98-0cc972f00882-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1166.976858] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e31d8386-5288-4d21-960c-f2c6c7f95ff4 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lock "84b8381c-bdd4-462e-aa98-0cc972f00882-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1166.977973] env[62627]: INFO nova.compute.manager [None req-e31d8386-5288-4d21-960c-f2c6c7f95ff4 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Terminating instance [ 1167.042374] env[62627]: DEBUG oslo_concurrency.lockutils [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Acquiring lock "refresh_cache-c25fe185-2d6a-40a8-be06-84ee22a5464e" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1167.043069] env[62627]: DEBUG oslo_concurrency.lockutils [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Acquired lock "refresh_cache-c25fe185-2d6a-40a8-be06-84ee22a5464e" 
{{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1167.043069] env[62627]: DEBUG nova.network.neutron [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1167.044324] env[62627]: DEBUG oslo_vmware.api [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195099, 'name': PowerOnVM_Task, 'duration_secs': 0.416667} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1167.045008] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1167.045008] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-bdd4ea33-19d1-4ac9-9e5a-8b52d55bdc18 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Updating instance '634d547b-3b5e-4185-8229-daf055ca6e69' progress to 100 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1167.468630] env[62627]: DEBUG oslo_concurrency.lockutils [None req-01508733-a48d-4853-b07d-ddc7d1054d51 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "abb14e3e-b521-4f21-bc94-6bbb66fbee57" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.889s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1167.470246] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Applying migration context for instance 634d547b-3b5e-4185-8229-daf055ca6e69 as it has an incoming, in-progress migration 06213fc3-8b9e-48ce-841a-c124a8ef7acb. Migration status is post-migrating {{(pid=62627) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1167.472098] env[62627]: INFO nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Updating resource usage from migration 06213fc3-8b9e-48ce-841a-c124a8ef7acb [ 1167.482662] env[62627]: DEBUG nova.compute.manager [None req-e31d8386-5288-4d21-960c-f2c6c7f95ff4 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Start destroying the instance on the hypervisor. 
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1167.482923] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e31d8386-5288-4d21-960c-f2c6c7f95ff4 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1167.484132] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9343e564-0a9e-48e6-af78-fa212fa2bc5b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.495325] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-e31d8386-5288-4d21-960c-f2c6c7f95ff4 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1167.495649] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-33046569-4214-45aa-9852-c9f48977703f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.498264] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 241311b0-ba81-41d1-a9b6-cd40d04dbe3c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1167.498402] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance aa181094-cb78-44e0-8d7a-1d677e0af1fe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1167.498525] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1167.498645] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1167.498762] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 84b8381c-bdd4-462e-aa98-0cc972f00882 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1167.498877] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance e9d33611-e373-4303-937d-b43d893e51f5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1167.498994] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1167.499163] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance b282093d-8013-436d-94d2-bc26161456ea actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1167.499288] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 3001147e-7be3-4f72-b330-8f99bf7cf3ea actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1167.499443] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance abb14e3e-b521-4f21-bc94-6bbb66fbee57 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1167.499575] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 367a83b9-79f8-43b2-91dd-40d13193fa40 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1167.499689] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Migration 06213fc3-8b9e-48ce-841a-c124a8ef7acb is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1167.499802] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 634d547b-3b5e-4185-8229-daf055ca6e69 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1167.499916] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance c25fe185-2d6a-40a8-be06-84ee22a5464e actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1167.500170] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Total usable vcpus: 48, total allocated vcpus: 14 {{(pid=62627) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1167.500316] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3328MB phys_disk=200GB used_disk=14GB total_vcpus=48 used_vcpus=14 pci_stats=[] {{(pid=62627) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1167.510937] env[62627]: DEBUG oslo_vmware.api [None req-e31d8386-5288-4d21-960c-f2c6c7f95ff4 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Waiting for the task: (returnval){ [ 1167.510937] env[62627]: value = "task-2195100" [ 1167.510937] env[62627]: _type = "Task" [ 1167.510937] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1167.520925] env[62627]: DEBUG oslo_vmware.api [None req-e31d8386-5288-4d21-960c-f2c6c7f95ff4 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195100, 'name': PowerOffVM_Task} progress is 0%. 
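The final resource view reconciles exactly with the per-instance allocations listed above: eleven instances at 192 MB, two at 256 MB and the in-progress migration at 192 MB sum to 2816 MB, and adding the 512 MB host reservation gives used_ram=3328MB; the same rows give used_vcpus=14 (thirteen instances plus the migration) and used_disk=14GB (eleven 1 GB instance roots, the 2 GB root of 367a83b9, the migration's 1 GB, and nothing for the boot-from-volume instance c25fe185). A worked check with the figures copied from the allocations above:

    ram_mb     = 11 * 192 + 2 * 256 + 192   # instances + migration = 2816
    used_ram   = ram_mb + 512               # + reserved host memory = 3328 MB
    used_vcpus = 13 + 1                     # 13 instances + 1 migration = 14
    used_disk  = 11 * 1 + 2 + 1             # 11x1 GB roots + 2 GB root + migration = 14 GB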
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.582378] env[62627]: DEBUG oslo_concurrency.lockutils [None req-bc716414-d176-4607-9d1f-27d1d1ade42c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Acquiring lock "aa181094-cb78-44e0-8d7a-1d677e0af1fe" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1167.582646] env[62627]: DEBUG oslo_concurrency.lockutils [None req-bc716414-d176-4607-9d1f-27d1d1ade42c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Lock "aa181094-cb78-44e0-8d7a-1d677e0af1fe" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1167.582848] env[62627]: DEBUG oslo_concurrency.lockutils [None req-bc716414-d176-4607-9d1f-27d1d1ade42c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Acquiring lock "aa181094-cb78-44e0-8d7a-1d677e0af1fe-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1167.583040] env[62627]: DEBUG oslo_concurrency.lockutils [None req-bc716414-d176-4607-9d1f-27d1d1ade42c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Lock "aa181094-cb78-44e0-8d7a-1d677e0af1fe-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1167.583480] env[62627]: DEBUG oslo_concurrency.lockutils [None req-bc716414-d176-4607-9d1f-27d1d1ade42c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Lock "aa181094-cb78-44e0-8d7a-1d677e0af1fe-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1167.586258] env[62627]: INFO nova.compute.manager [None req-bc716414-d176-4607-9d1f-27d1d1ade42c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: aa181094-cb78-44e0-8d7a-1d677e0af1fe] Terminating instance [ 1167.588469] env[62627]: DEBUG nova.network.neutron [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Instance cache missing network info. 
{{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1167.741655] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9ef7ca0-14bc-4692-a524-875ea9115d0f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.745910] env[62627]: DEBUG nova.network.neutron [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Updating instance_info_cache with network_info: [{"id": "438ff9e4-843e-4e29-b42f-fd893752d160", "address": "fa:16:3e:8c:f6:9e", "network": {"id": "df8b542a-cb92-4195-a818-4e830c2b65b9", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1973701137-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9b8ab3c165b34e418807f39bc01abf16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8459aaf-d6a8-46fb-ad14-464ac3104695", "external-id": "nsx-vlan-transportzone-46", "segmentation_id": 46, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap438ff9e4-84", "ovs_interfaceid": "438ff9e4-843e-4e29-b42f-fd893752d160", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1167.755384] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b31ad8fb-5b48-4fcb-bfeb-ada1af026974 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.791749] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf303533-8063-411f-ac41-2a6e08556249 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.800549] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1adf7196-96e6-4fe2-a68b-2b1a86df942c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.806737] env[62627]: DEBUG nova.compute.manager [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Start spawning the instance on the hypervisor. 
{{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1167.807556] env[62627]: DEBUG nova.virt.hardware [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1167.807667] env[62627]: DEBUG nova.virt.hardware [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1167.807908] env[62627]: DEBUG nova.virt.hardware [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1167.808209] env[62627]: DEBUG nova.virt.hardware [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1167.808452] env[62627]: DEBUG nova.virt.hardware [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1167.808729] env[62627]: DEBUG nova.virt.hardware [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1167.809020] env[62627]: DEBUG nova.virt.hardware [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1167.809321] env[62627]: DEBUG nova.virt.hardware [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1167.809987] env[62627]: DEBUG nova.virt.hardware [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Got 1 
possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1167.809987] env[62627]: DEBUG nova.virt.hardware [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1167.810175] env[62627]: DEBUG nova.virt.hardware [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1167.811353] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-040fe84d-d5fe-4ff1-b9cd-9eaa3e9c0f69 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.830991] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc883e83-79fc-4b30-abdc-b8b6542cb5f5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.837756] env[62627]: DEBUG nova.compute.provider_tree [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1168.021943] env[62627]: DEBUG oslo_vmware.api [None req-e31d8386-5288-4d21-960c-f2c6c7f95ff4 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195100, 'name': PowerOffVM_Task, 'duration_secs': 0.41001} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1168.022688] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-e31d8386-5288-4d21-960c-f2c6c7f95ff4 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1168.022877] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e31d8386-5288-4d21-960c-f2c6c7f95ff4 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1168.023147] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-263d23ba-6f15-41e4-8d0a-4846ee332eb1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.096035] env[62627]: DEBUG nova.compute.manager [None req-bc716414-d176-4607-9d1f-27d1d1ade42c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: aa181094-cb78-44e0-8d7a-1d677e0af1fe] Start destroying the instance on the hypervisor. 
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1168.096193] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-bc716414-d176-4607-9d1f-27d1d1ade42c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: aa181094-cb78-44e0-8d7a-1d677e0af1fe] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1168.096675] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e31d8386-5288-4d21-960c-f2c6c7f95ff4 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1168.096995] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e31d8386-5288-4d21-960c-f2c6c7f95ff4 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1168.097281] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-e31d8386-5288-4d21-960c-f2c6c7f95ff4 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Deleting the datastore file [datastore2] 84b8381c-bdd4-462e-aa98-0cc972f00882 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1168.098349] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c0cc088-330d-43c3-a18e-7ea09f74af59 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.101529] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4b9c9ee8-77e6-42ba-9b77-3265bb0b343e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.114138] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc716414-d176-4607-9d1f-27d1d1ade42c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: aa181094-cb78-44e0-8d7a-1d677e0af1fe] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1168.115723] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-237d8db5-8dd0-4b36-90a8-4823783f5059 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.118104] env[62627]: DEBUG oslo_vmware.api [None req-e31d8386-5288-4d21-960c-f2c6c7f95ff4 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Waiting for the task: (returnval){ [ 1168.118104] env[62627]: value = "task-2195102" [ 1168.118104] env[62627]: _type = "Task" [ 1168.118104] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.127746] env[62627]: DEBUG oslo_vmware.api [None req-bc716414-d176-4607-9d1f-27d1d1ade42c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for the task: (returnval){ [ 1168.127746] env[62627]: value = "task-2195103" [ 1168.127746] env[62627]: _type = "Task" [ 1168.127746] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.131630] env[62627]: DEBUG oslo_vmware.api [None req-e31d8386-5288-4d21-960c-f2c6c7f95ff4 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195102, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.141361] env[62627]: DEBUG oslo_vmware.api [None req-bc716414-d176-4607-9d1f-27d1d1ade42c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2195103, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.249239] env[62627]: DEBUG oslo_concurrency.lockutils [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Releasing lock "refresh_cache-c25fe185-2d6a-40a8-be06-84ee22a5464e" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1168.249607] env[62627]: DEBUG nova.compute.manager [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Instance network_info: |[{"id": "438ff9e4-843e-4e29-b42f-fd893752d160", "address": "fa:16:3e:8c:f6:9e", "network": {"id": "df8b542a-cb92-4195-a818-4e830c2b65b9", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1973701137-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9b8ab3c165b34e418807f39bc01abf16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8459aaf-d6a8-46fb-ad14-464ac3104695", "external-id": "nsx-vlan-transportzone-46", "segmentation_id": 46, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap438ff9e4-84", "ovs_interfaceid": "438ff9e4-843e-4e29-b42f-fd893752d160", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1168.250069] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Instance VIF info 
[{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8c:f6:9e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8459aaf-d6a8-46fb-ad14-464ac3104695', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '438ff9e4-843e-4e29-b42f-fd893752d160', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1168.258636] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Creating folder: Project (9b8ab3c165b34e418807f39bc01abf16). Parent ref: group-v447541. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1168.259014] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-732f8f78-60f5-45be-93d0-262dedb1aa0b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.276782] env[62627]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 1168.277061] env[62627]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=62627) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1168.277528] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Folder already exists: Project (9b8ab3c165b34e418807f39bc01abf16). Parent ref: group-v447541. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 1168.277838] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Creating folder: Instances. Parent ref: group-v447785. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1168.278271] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3778c2f4-1666-426c-838c-5efc555a24f7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.292018] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Created folder: Instances in parent group-v447785. [ 1168.292283] env[62627]: DEBUG oslo.service.loopingcall [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1168.292591] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1168.292904] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-91baad70-6d8e-4797-8717-5f64b17d2165 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.314602] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1168.314602] env[62627]: value = "task-2195106" [ 1168.314602] env[62627]: _type = "Task" [ 1168.314602] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.324072] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195106, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.342051] env[62627]: DEBUG nova.scheduler.client.report [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1168.579045] env[62627]: DEBUG nova.compute.manager [req-be954be0-2f73-4600-a9d4-f09a92cf0d20 req-3d83419b-74bf-4692-866c-9e68833f17eb service nova] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Received event network-changed-438ff9e4-843e-4e29-b42f-fd893752d160 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1168.579480] env[62627]: DEBUG nova.compute.manager [req-be954be0-2f73-4600-a9d4-f09a92cf0d20 req-3d83419b-74bf-4692-866c-9e68833f17eb service nova] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Refreshing instance network info cache due to event network-changed-438ff9e4-843e-4e29-b42f-fd893752d160. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1168.579621] env[62627]: DEBUG oslo_concurrency.lockutils [req-be954be0-2f73-4600-a9d4-f09a92cf0d20 req-3d83419b-74bf-4692-866c-9e68833f17eb service nova] Acquiring lock "refresh_cache-c25fe185-2d6a-40a8-be06-84ee22a5464e" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1168.579815] env[62627]: DEBUG oslo_concurrency.lockutils [req-be954be0-2f73-4600-a9d4-f09a92cf0d20 req-3d83419b-74bf-4692-866c-9e68833f17eb service nova] Acquired lock "refresh_cache-c25fe185-2d6a-40a8-be06-84ee22a5464e" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1168.580018] env[62627]: DEBUG nova.network.neutron [req-be954be0-2f73-4600-a9d4-f09a92cf0d20 req-3d83419b-74bf-4692-866c-9e68833f17eb service nova] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Refreshing network info cache for port 438ff9e4-843e-4e29-b42f-fd893752d160 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1168.610694] env[62627]: DEBUG oslo_concurrency.lockutils [None req-01508733-a48d-4853-b07d-ddc7d1054d51 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquiring lock "abb14e3e-b521-4f21-bc94-6bbb66fbee57" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1168.610694] env[62627]: DEBUG oslo_concurrency.lockutils [None req-01508733-a48d-4853-b07d-ddc7d1054d51 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "abb14e3e-b521-4f21-bc94-6bbb66fbee57" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1168.611304] env[62627]: DEBUG oslo_concurrency.lockutils [None req-01508733-a48d-4853-b07d-ddc7d1054d51 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquiring lock "abb14e3e-b521-4f21-bc94-6bbb66fbee57-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1168.611664] env[62627]: DEBUG oslo_concurrency.lockutils [None req-01508733-a48d-4853-b07d-ddc7d1054d51 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "abb14e3e-b521-4f21-bc94-6bbb66fbee57-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1168.613505] env[62627]: DEBUG oslo_concurrency.lockutils [None req-01508733-a48d-4853-b07d-ddc7d1054d51 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "abb14e3e-b521-4f21-bc94-6bbb66fbee57-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1168.614090] env[62627]: INFO nova.compute.manager [None req-01508733-a48d-4853-b07d-ddc7d1054d51 tempest-DeleteServersTestJSON-1791757005 
tempest-DeleteServersTestJSON-1791757005-project-member] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Terminating instance [ 1168.630423] env[62627]: DEBUG oslo_vmware.api [None req-e31d8386-5288-4d21-960c-f2c6c7f95ff4 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195102, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.282922} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1168.631551] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-e31d8386-5288-4d21-960c-f2c6c7f95ff4 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1168.631841] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e31d8386-5288-4d21-960c-f2c6c7f95ff4 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1168.632182] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e31d8386-5288-4d21-960c-f2c6c7f95ff4 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1168.632580] env[62627]: INFO nova.compute.manager [None req-e31d8386-5288-4d21-960c-f2c6c7f95ff4 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1168.632952] env[62627]: DEBUG oslo.service.loopingcall [None req-e31d8386-5288-4d21-960c-f2c6c7f95ff4 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1168.633619] env[62627]: DEBUG nova.compute.manager [-] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1168.633780] env[62627]: DEBUG nova.network.neutron [-] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1168.646607] env[62627]: DEBUG oslo_vmware.api [None req-bc716414-d176-4607-9d1f-27d1d1ade42c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2195103, 'name': PowerOffVM_Task, 'duration_secs': 0.334404} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1168.647901] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc716414-d176-4607-9d1f-27d1d1ade42c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: aa181094-cb78-44e0-8d7a-1d677e0af1fe] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1168.647901] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-bc716414-d176-4607-9d1f-27d1d1ade42c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: aa181094-cb78-44e0-8d7a-1d677e0af1fe] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1168.647901] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-59efb604-2690-4ed7-a407-088a96b24787 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.750787] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-bc716414-d176-4607-9d1f-27d1d1ade42c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: aa181094-cb78-44e0-8d7a-1d677e0af1fe] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1168.751400] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-bc716414-d176-4607-9d1f-27d1d1ade42c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: aa181094-cb78-44e0-8d7a-1d677e0af1fe] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1168.751400] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc716414-d176-4607-9d1f-27d1d1ade42c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Deleting the datastore file [datastore2] aa181094-cb78-44e0-8d7a-1d677e0af1fe {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1168.751535] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0d9eff1a-f8f7-4dc4-9a51-d1282b69205d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.760936] env[62627]: DEBUG oslo_vmware.api [None req-bc716414-d176-4607-9d1f-27d1d1ade42c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for the task: (returnval){ [ 1168.760936] env[62627]: value = "task-2195108" [ 1168.760936] env[62627]: _type = "Task" [ 1168.760936] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.770684] env[62627]: DEBUG oslo_vmware.api [None req-bc716414-d176-4607-9d1f-27d1d1ade42c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2195108, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.826091] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195106, 'name': CreateVM_Task, 'duration_secs': 0.421479} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1168.826286] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1168.826971] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'delete_on_termination': True, 'attachment_id': '4e9ab5e6-d668-4d08-8c6d-a29f874549e1', 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447792', 'volume_id': '5211d219-5b96-4822-bdba-9d41bc2363b9', 'name': 'volume-5211d219-5b96-4822-bdba-9d41bc2363b9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c25fe185-2d6a-40a8-be06-84ee22a5464e', 'attached_at': '', 'detached_at': '', 'volume_id': '5211d219-5b96-4822-bdba-9d41bc2363b9', 'serial': '5211d219-5b96-4822-bdba-9d41bc2363b9'}, 'boot_index': 0, 'mount_device': '/dev/sda', 'disk_bus': None, 'device_type': None, 'guest_format': None, 'volume_type': None}], 'swap': None} {{(pid=62627) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1168.827272] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Root volume attach. 
Driver type: vmdk {{(pid=62627) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1168.828093] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6be8818a-cdf5-4aa4-895c-6aa56e093735 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.838928] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c952409-57bc-40e8-b7ce-42bced3fb156 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.847282] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5e47f4a-e3ec-44cc-a90c-d902b9bf8da5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.850738] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62627) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1168.850846] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.398s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1168.856452] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-a5211d84-e29a-4a85-b598-7cf9fe68e9f7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.874567] env[62627]: DEBUG oslo_vmware.api [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Waiting for the task: (returnval){ [ 1168.874567] env[62627]: value = "task-2195109" [ 1168.874567] env[62627]: _type = "Task" [ 1168.874567] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.886841] env[62627]: DEBUG oslo_vmware.api [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Task: {'id': task-2195109, 'name': RelocateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.120785] env[62627]: DEBUG nova.compute.manager [None req-01508733-a48d-4853-b07d-ddc7d1054d51 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Start destroying the instance on the hypervisor. 
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1169.121118] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-01508733-a48d-4853-b07d-ddc7d1054d51 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1169.122075] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f3e8e3c-66ae-4222-be79-0fe6e1b5e629 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.132258] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-01508733-a48d-4853-b07d-ddc7d1054d51 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1169.132700] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e0de1ed5-a1d1-4f29-9244-a31550be1833 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.146785] env[62627]: DEBUG oslo_vmware.api [None req-01508733-a48d-4853-b07d-ddc7d1054d51 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 1169.146785] env[62627]: value = "task-2195110" [ 1169.146785] env[62627]: _type = "Task" [ 1169.146785] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.157965] env[62627]: DEBUG oslo_vmware.api [None req-01508733-a48d-4853-b07d-ddc7d1054d51 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2195110, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.280279] env[62627]: DEBUG oslo_vmware.api [None req-bc716414-d176-4607-9d1f-27d1d1ade42c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2195108, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.33801} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1169.280720] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc716414-d176-4607-9d1f-27d1d1ade42c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1169.280910] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-bc716414-d176-4607-9d1f-27d1d1ade42c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: aa181094-cb78-44e0-8d7a-1d677e0af1fe] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1169.281106] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-bc716414-d176-4607-9d1f-27d1d1ade42c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: aa181094-cb78-44e0-8d7a-1d677e0af1fe] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1169.281335] env[62627]: INFO nova.compute.manager [None req-bc716414-d176-4607-9d1f-27d1d1ade42c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: aa181094-cb78-44e0-8d7a-1d677e0af1fe] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1169.281614] env[62627]: DEBUG oslo.service.loopingcall [None req-bc716414-d176-4607-9d1f-27d1d1ade42c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1169.282334] env[62627]: DEBUG nova.compute.manager [-] [instance: aa181094-cb78-44e0-8d7a-1d677e0af1fe] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1169.282483] env[62627]: DEBUG nova.network.neutron [-] [instance: aa181094-cb78-44e0-8d7a-1d677e0af1fe] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1169.366982] env[62627]: DEBUG oslo_concurrency.lockutils [None req-07afeed6-75ad-4f85-8c3f-ff7613a1d38f tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquiring lock "634d547b-3b5e-4185-8229-daf055ca6e69" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1169.367423] env[62627]: DEBUG oslo_concurrency.lockutils [None req-07afeed6-75ad-4f85-8c3f-ff7613a1d38f tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "634d547b-3b5e-4185-8229-daf055ca6e69" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1169.367719] env[62627]: DEBUG nova.compute.manager [None req-07afeed6-75ad-4f85-8c3f-ff7613a1d38f tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Going to confirm migration 4 {{(pid=62627) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1169.389646] env[62627]: DEBUG oslo_vmware.api [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Task: {'id': task-2195109, 'name': RelocateVM_Task} progress is 40%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.473475] env[62627]: DEBUG nova.network.neutron [req-be954be0-2f73-4600-a9d4-f09a92cf0d20 req-3d83419b-74bf-4692-866c-9e68833f17eb service nova] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Updated VIF entry in instance network info cache for port 438ff9e4-843e-4e29-b42f-fd893752d160. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1169.473987] env[62627]: DEBUG nova.network.neutron [req-be954be0-2f73-4600-a9d4-f09a92cf0d20 req-3d83419b-74bf-4692-866c-9e68833f17eb service nova] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Updating instance_info_cache with network_info: [{"id": "438ff9e4-843e-4e29-b42f-fd893752d160", "address": "fa:16:3e:8c:f6:9e", "network": {"id": "df8b542a-cb92-4195-a818-4e830c2b65b9", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1973701137-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9b8ab3c165b34e418807f39bc01abf16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8459aaf-d6a8-46fb-ad14-464ac3104695", "external-id": "nsx-vlan-transportzone-46", "segmentation_id": 46, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap438ff9e4-84", "ovs_interfaceid": "438ff9e4-843e-4e29-b42f-fd893752d160", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1169.661691] env[62627]: DEBUG oslo_vmware.api [None req-01508733-a48d-4853-b07d-ddc7d1054d51 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2195110, 'name': PowerOffVM_Task, 'duration_secs': 0.325403} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1169.662321] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-01508733-a48d-4853-b07d-ddc7d1054d51 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1169.662321] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-01508733-a48d-4853-b07d-ddc7d1054d51 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1169.662603] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8b3bec0d-dbae-4eda-8fdb-3d84e02fc5be {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.760549] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-01508733-a48d-4853-b07d-ddc7d1054d51 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1169.760745] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-01508733-a48d-4853-b07d-ddc7d1054d51 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1169.760936] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-01508733-a48d-4853-b07d-ddc7d1054d51 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Deleting the datastore file [datastore2] abb14e3e-b521-4f21-bc94-6bbb66fbee57 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1169.761224] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6395217c-d0f1-4933-87e5-ae78b04b79ae {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.779857] env[62627]: DEBUG oslo_vmware.api [None req-01508733-a48d-4853-b07d-ddc7d1054d51 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for the task: (returnval){ [ 1169.779857] env[62627]: value = "task-2195112" [ 1169.779857] env[62627]: _type = "Task" [ 1169.779857] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.792401] env[62627]: DEBUG oslo_vmware.api [None req-01508733-a48d-4853-b07d-ddc7d1054d51 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2195112, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.889557] env[62627]: DEBUG oslo_vmware.api [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Task: {'id': task-2195109, 'name': RelocateVM_Task} progress is 54%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.931568] env[62627]: DEBUG oslo_concurrency.lockutils [None req-07afeed6-75ad-4f85-8c3f-ff7613a1d38f tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquiring lock "refresh_cache-634d547b-3b5e-4185-8229-daf055ca6e69" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1169.931754] env[62627]: DEBUG oslo_concurrency.lockutils [None req-07afeed6-75ad-4f85-8c3f-ff7613a1d38f tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquired lock "refresh_cache-634d547b-3b5e-4185-8229-daf055ca6e69" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1169.932047] env[62627]: DEBUG nova.network.neutron [None req-07afeed6-75ad-4f85-8c3f-ff7613a1d38f tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1169.932211] env[62627]: DEBUG nova.objects.instance [None req-07afeed6-75ad-4f85-8c3f-ff7613a1d38f tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lazy-loading 'info_cache' on Instance uuid 634d547b-3b5e-4185-8229-daf055ca6e69 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1169.977768] env[62627]: DEBUG oslo_concurrency.lockutils [req-be954be0-2f73-4600-a9d4-f09a92cf0d20 req-3d83419b-74bf-4692-866c-9e68833f17eb service nova] Releasing lock "refresh_cache-c25fe185-2d6a-40a8-be06-84ee22a5464e" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1170.098179] env[62627]: DEBUG nova.network.neutron [-] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1170.291369] env[62627]: DEBUG oslo_vmware.api [None req-01508733-a48d-4853-b07d-ddc7d1054d51 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Task: {'id': task-2195112, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.344775} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1170.291637] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-01508733-a48d-4853-b07d-ddc7d1054d51 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1170.291821] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-01508733-a48d-4853-b07d-ddc7d1054d51 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1170.291998] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-01508733-a48d-4853-b07d-ddc7d1054d51 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1170.292274] env[62627]: INFO nova.compute.manager [None req-01508733-a48d-4853-b07d-ddc7d1054d51 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1170.292533] env[62627]: DEBUG oslo.service.loopingcall [None req-01508733-a48d-4853-b07d-ddc7d1054d51 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1170.292746] env[62627]: DEBUG nova.compute.manager [-] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1170.292843] env[62627]: DEBUG nova.network.neutron [-] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1170.306875] env[62627]: DEBUG nova.network.neutron [-] [instance: aa181094-cb78-44e0-8d7a-1d677e0af1fe] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1170.386948] env[62627]: DEBUG oslo_vmware.api [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Task: {'id': task-2195109, 'name': RelocateVM_Task} progress is 67%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.608082] env[62627]: INFO nova.compute.manager [-] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Took 1.97 seconds to deallocate network for instance. 
[ 1170.690632] env[62627]: DEBUG nova.compute.manager [req-42f6907e-23a3-4383-9255-98301042c1a6 req-1b4c45fd-f7c3-4075-9a6d-392055cff302 service nova] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Received event network-vif-deleted-4c3f3405-39a4-4c22-9c26-bdd0ab342c29 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1170.690933] env[62627]: DEBUG nova.compute.manager [req-42f6907e-23a3-4383-9255-98301042c1a6 req-1b4c45fd-f7c3-4075-9a6d-392055cff302 service nova] [instance: aa181094-cb78-44e0-8d7a-1d677e0af1fe] Received event network-vif-deleted-4eb9bc28-5e4b-4b2e-859d-31b930210e13 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1170.809174] env[62627]: INFO nova.compute.manager [-] [instance: aa181094-cb78-44e0-8d7a-1d677e0af1fe] Took 1.53 seconds to deallocate network for instance. [ 1170.887997] env[62627]: DEBUG oslo_vmware.api [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Task: {'id': task-2195109, 'name': RelocateVM_Task} progress is 82%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.104510] env[62627]: DEBUG nova.network.neutron [-] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1171.117763] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e31d8386-5288-4d21-960c-f2c6c7f95ff4 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1171.117763] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e31d8386-5288-4d21-960c-f2c6c7f95ff4 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1171.117763] env[62627]: DEBUG nova.objects.instance [None req-e31d8386-5288-4d21-960c-f2c6c7f95ff4 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lazy-loading 'resources' on Instance uuid 84b8381c-bdd4-462e-aa98-0cc972f00882 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1171.250186] env[62627]: DEBUG nova.network.neutron [None req-07afeed6-75ad-4f85-8c3f-ff7613a1d38f tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Updating instance_info_cache with network_info: [{"id": "9830a1ce-942e-4954-8513-f624e13d304c", "address": "fa:16:3e:bb:02:dc", "network": {"id": "f5de376b-cb15-4145-90b2-664ca4fa638e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1451402594-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": 
"192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74aed70194cf444884768a5c89fe8613", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "098df9b7-d759-47f7-b756-334848cb423b", "external-id": "nsx-vlan-transportzone-765", "segmentation_id": 765, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9830a1ce-94", "ovs_interfaceid": "9830a1ce-942e-4954-8513-f624e13d304c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1171.319633] env[62627]: DEBUG oslo_concurrency.lockutils [None req-bc716414-d176-4607-9d1f-27d1d1ade42c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1171.388999] env[62627]: DEBUG oslo_vmware.api [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Task: {'id': task-2195109, 'name': RelocateVM_Task} progress is 97%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.610918] env[62627]: INFO nova.compute.manager [-] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Took 1.32 seconds to deallocate network for instance. [ 1171.753202] env[62627]: DEBUG oslo_concurrency.lockutils [None req-07afeed6-75ad-4f85-8c3f-ff7613a1d38f tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Releasing lock "refresh_cache-634d547b-3b5e-4185-8229-daf055ca6e69" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1171.753202] env[62627]: DEBUG nova.objects.instance [None req-07afeed6-75ad-4f85-8c3f-ff7613a1d38f tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lazy-loading 'migration_context' on Instance uuid 634d547b-3b5e-4185-8229-daf055ca6e69 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1171.810409] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b757ae9-aa87-463f-a15e-25fa19a85f3c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.819232] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-022042a6-53fa-4278-a081-0c05e5d6ff2a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.850877] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a381ef8-d663-4b04-a09a-1579624dd200 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.858438] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a127221-c545-4f47-94c2-b3aed4294e28 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1171.871943] env[62627]: DEBUG nova.compute.provider_tree [None req-e31d8386-5288-4d21-960c-f2c6c7f95ff4 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1171.888077] env[62627]: DEBUG oslo_vmware.api [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Task: {'id': task-2195109, 'name': RelocateVM_Task} progress is 97%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.117909] env[62627]: DEBUG oslo_concurrency.lockutils [None req-01508733-a48d-4853-b07d-ddc7d1054d51 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1172.258767] env[62627]: DEBUG nova.objects.base [None req-07afeed6-75ad-4f85-8c3f-ff7613a1d38f tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Object Instance<634d547b-3b5e-4185-8229-daf055ca6e69> lazy-loaded attributes: info_cache,migration_context {{(pid=62627) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1172.259874] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2aaa2881-61fd-439d-91e4-0cdc9cf2c379 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.282185] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-12962abc-54d1-4c2e-9f37-7eb6205d86a8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.288899] env[62627]: DEBUG oslo_vmware.api [None req-07afeed6-75ad-4f85-8c3f-ff7613a1d38f tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1172.288899] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]527ae5ac-d660-7069-4ef6-0a73c99ed744" [ 1172.288899] env[62627]: _type = "Task" [ 1172.288899] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1172.297996] env[62627]: DEBUG oslo_vmware.api [None req-07afeed6-75ad-4f85-8c3f-ff7613a1d38f tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]527ae5ac-d660-7069-4ef6-0a73c99ed744, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.375359] env[62627]: DEBUG nova.scheduler.client.report [None req-e31d8386-5288-4d21-960c-f2c6c7f95ff4 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1172.389190] env[62627]: DEBUG oslo_vmware.api [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Task: {'id': task-2195109, 'name': RelocateVM_Task} progress is 97%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.719693] env[62627]: DEBUG nova.compute.manager [req-274b5cb6-b2c8-4b70-a51f-e078de94b439 req-b4d4d215-4605-4ac4-8e97-9cfb9cfccde6 service nova] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Received event network-vif-deleted-9297477b-3156-43b7-b7ca-4c2d76609f24 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1172.803286] env[62627]: DEBUG oslo_vmware.api [None req-07afeed6-75ad-4f85-8c3f-ff7613a1d38f tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]527ae5ac-d660-7069-4ef6-0a73c99ed744, 'name': SearchDatastore_Task, 'duration_secs': 0.010675} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1172.804208] env[62627]: DEBUG oslo_concurrency.lockutils [None req-07afeed6-75ad-4f85-8c3f-ff7613a1d38f tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1172.881578] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e31d8386-5288-4d21-960c-f2c6c7f95ff4 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.765s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1172.884509] env[62627]: DEBUG oslo_concurrency.lockutils [None req-bc716414-d176-4607-9d1f-27d1d1ade42c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.565s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1172.884892] env[62627]: DEBUG nova.objects.instance [None req-bc716414-d176-4607-9d1f-27d1d1ade42c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Lazy-loading 'resources' on Instance uuid aa181094-cb78-44e0-8d7a-1d677e0af1fe {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1172.896429] env[62627]: DEBUG oslo_vmware.api [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Task: {'id': task-2195109, 'name': RelocateVM_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.902410] env[62627]: INFO nova.scheduler.client.report [None req-e31d8386-5288-4d21-960c-f2c6c7f95ff4 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Deleted allocations for instance 84b8381c-bdd4-462e-aa98-0cc972f00882 [ 1173.396417] env[62627]: DEBUG oslo_vmware.api [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Task: {'id': task-2195109, 'name': RelocateVM_Task, 'duration_secs': 4.058333} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1173.396978] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Volume attach. 
Driver type: vmdk {{(pid=62627) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1173.397290] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447792', 'volume_id': '5211d219-5b96-4822-bdba-9d41bc2363b9', 'name': 'volume-5211d219-5b96-4822-bdba-9d41bc2363b9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c25fe185-2d6a-40a8-be06-84ee22a5464e', 'attached_at': '', 'detached_at': '', 'volume_id': '5211d219-5b96-4822-bdba-9d41bc2363b9', 'serial': '5211d219-5b96-4822-bdba-9d41bc2363b9'} {{(pid=62627) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1173.398261] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f97decb-07d3-446d-b57b-bc2444cf571d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.423386] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-834e7493-7bc7-49a9-86d6-2d18e0b34f04 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.425878] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e31d8386-5288-4d21-960c-f2c6c7f95ff4 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lock "84b8381c-bdd4-462e-aa98-0cc972f00882" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.451s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1173.452069] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Reconfiguring VM instance instance-0000005e to attach disk [datastore2] volume-5211d219-5b96-4822-bdba-9d41bc2363b9/volume-5211d219-5b96-4822-bdba-9d41bc2363b9.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1173.455531] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-75851f80-c013-4138-aecb-b5a3a741dfed {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.477605] env[62627]: DEBUG oslo_vmware.api [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Waiting for the task: (returnval){ [ 1173.477605] env[62627]: value = "task-2195113" [ 1173.477605] env[62627]: _type = "Task" [ 1173.477605] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1173.491355] env[62627]: DEBUG oslo_vmware.api [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Task: {'id': task-2195113, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.639503] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52402b21-313b-4095-8102-edb824b8fc2e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.663715] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5799c5a-1f0c-4ae8-b2c6-ec7128f387e8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.703332] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a260ec27-1417-4e83-883b-41bf2ba56639 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.713500] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33675a5f-9d2b-40ee-a974-6ad8c7110cd2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.731838] env[62627]: DEBUG nova.compute.provider_tree [None req-bc716414-d176-4607-9d1f-27d1d1ade42c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1173.868927] env[62627]: DEBUG oslo_concurrency.lockutils [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Acquiring lock "b282093d-8013-436d-94d2-bc26161456ea" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1173.869386] env[62627]: DEBUG oslo_concurrency.lockutils [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Lock "b282093d-8013-436d-94d2-bc26161456ea" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1173.869490] env[62627]: INFO nova.compute.manager [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Shelving [ 1173.989040] env[62627]: DEBUG oslo_vmware.api [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Task: {'id': task-2195113, 'name': ReconfigVM_Task, 'duration_secs': 0.419061} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1173.989411] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Reconfigured VM instance instance-0000005e to attach disk [datastore2] volume-5211d219-5b96-4822-bdba-9d41bc2363b9/volume-5211d219-5b96-4822-bdba-9d41bc2363b9.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1173.995354] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d9bcecd0-b95f-412e-aeb3-d74f1ec851b5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.011594] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4f4c2876-2d6e-46b9-b575-053f9a6420b0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Acquiring lock "3001147e-7be3-4f72-b330-8f99bf7cf3ea" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1174.011899] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4f4c2876-2d6e-46b9-b575-053f9a6420b0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lock "3001147e-7be3-4f72-b330-8f99bf7cf3ea" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1174.016976] env[62627]: DEBUG oslo_vmware.api [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Waiting for the task: (returnval){ [ 1174.016976] env[62627]: value = "task-2195114" [ 1174.016976] env[62627]: _type = "Task" [ 1174.016976] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1174.029113] env[62627]: DEBUG oslo_vmware.api [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Task: {'id': task-2195114, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.235865] env[62627]: DEBUG nova.scheduler.client.report [None req-bc716414-d176-4607-9d1f-27d1d1ade42c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1174.516477] env[62627]: DEBUG nova.compute.utils [None req-4f4c2876-2d6e-46b9-b575-053f9a6420b0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1174.532283] env[62627]: DEBUG oslo_vmware.api [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Task: {'id': task-2195114, 'name': ReconfigVM_Task, 'duration_secs': 0.189688} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1174.532602] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447792', 'volume_id': '5211d219-5b96-4822-bdba-9d41bc2363b9', 'name': 'volume-5211d219-5b96-4822-bdba-9d41bc2363b9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c25fe185-2d6a-40a8-be06-84ee22a5464e', 'attached_at': '', 'detached_at': '', 'volume_id': '5211d219-5b96-4822-bdba-9d41bc2363b9', 'serial': '5211d219-5b96-4822-bdba-9d41bc2363b9'} {{(pid=62627) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1174.533417] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0b8b615c-e1c9-43f1-af32-499002f582fd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.542236] env[62627]: DEBUG oslo_vmware.api [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Waiting for the task: (returnval){ [ 1174.542236] env[62627]: value = "task-2195115" [ 1174.542236] env[62627]: _type = "Task" [ 1174.542236] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1174.552624] env[62627]: DEBUG oslo_vmware.api [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Task: {'id': task-2195115, 'name': Rename_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.741776] env[62627]: DEBUG oslo_concurrency.lockutils [None req-bc716414-d176-4607-9d1f-27d1d1ade42c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.857s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1174.744458] env[62627]: DEBUG oslo_concurrency.lockutils [None req-01508733-a48d-4853-b07d-ddc7d1054d51 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.627s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1174.744702] env[62627]: DEBUG nova.objects.instance [None req-01508733-a48d-4853-b07d-ddc7d1054d51 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lazy-loading 'resources' on Instance uuid abb14e3e-b521-4f21-bc94-6bbb66fbee57 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1174.769585] env[62627]: INFO nova.scheduler.client.report [None req-bc716414-d176-4607-9d1f-27d1d1ade42c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Deleted allocations for instance aa181094-cb78-44e0-8d7a-1d677e0af1fe [ 1174.880306] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1174.880840] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c82cf0ad-a650-474b-9a3d-d6efc03a9989 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.890345] env[62627]: DEBUG oslo_vmware.api [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Waiting for the task: (returnval){ [ 1174.890345] env[62627]: value = "task-2195116" [ 1174.890345] env[62627]: _type = "Task" [ 1174.890345] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1174.900435] env[62627]: DEBUG oslo_vmware.api [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195116, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.020076] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4f4c2876-2d6e-46b9-b575-053f9a6420b0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lock "3001147e-7be3-4f72-b330-8f99bf7cf3ea" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1175.053491] env[62627]: DEBUG oslo_vmware.api [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Task: {'id': task-2195115, 'name': Rename_Task, 'duration_secs': 0.19982} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1175.053827] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1175.054112] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-60a95ede-9e90-4acf-8f24-828fca6702de {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.064230] env[62627]: DEBUG oslo_vmware.api [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Waiting for the task: (returnval){ [ 1175.064230] env[62627]: value = "task-2195117" [ 1175.064230] env[62627]: _type = "Task" [ 1175.064230] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1175.073335] env[62627]: DEBUG oslo_vmware.api [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Task: {'id': task-2195117, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.278853] env[62627]: DEBUG oslo_concurrency.lockutils [None req-bc716414-d176-4607-9d1f-27d1d1ade42c tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Lock "aa181094-cb78-44e0-8d7a-1d677e0af1fe" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.696s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1175.403454] env[62627]: DEBUG oslo_vmware.api [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195116, 'name': PowerOffVM_Task, 'duration_secs': 0.249697} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1175.403700] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1175.404598] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23dfdc05-53b7-4604-b376-d5fda028938c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.408977] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-059f9918-abb6-4beb-ad26-e1f7c6e2dd57 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.430791] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da4697c6-5708-41f7-83c5-910377c5744e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.435094] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-832edfbd-a538-4c8d-b4f5-2479531c34b8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.473825] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-839cf2ef-19c8-4aad-a125-ddf0c6002026 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.484137] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0accbc51-4444-45cf-b54c-b9750885f131 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.502061] env[62627]: DEBUG nova.compute.provider_tree [None req-01508733-a48d-4853-b07d-ddc7d1054d51 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1175.582752] env[62627]: DEBUG oslo_vmware.api [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Task: {'id': task-2195117, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.730752] env[62627]: DEBUG oslo_concurrency.lockutils [None req-17fe70c8-1277-40a6-82ca-ca355311b358 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Acquiring lock "241311b0-ba81-41d1-a9b6-cd40d04dbe3c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1175.730922] env[62627]: DEBUG oslo_concurrency.lockutils [None req-17fe70c8-1277-40a6-82ca-ca355311b358 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Lock "241311b0-ba81-41d1-a9b6-cd40d04dbe3c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1175.731147] env[62627]: DEBUG oslo_concurrency.lockutils [None req-17fe70c8-1277-40a6-82ca-ca355311b358 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Acquiring lock "241311b0-ba81-41d1-a9b6-cd40d04dbe3c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1175.731348] env[62627]: DEBUG oslo_concurrency.lockutils [None req-17fe70c8-1277-40a6-82ca-ca355311b358 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Lock "241311b0-ba81-41d1-a9b6-cd40d04dbe3c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1175.731524] env[62627]: DEBUG oslo_concurrency.lockutils [None req-17fe70c8-1277-40a6-82ca-ca355311b358 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Lock "241311b0-ba81-41d1-a9b6-cd40d04dbe3c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1175.734054] env[62627]: INFO nova.compute.manager [None req-17fe70c8-1277-40a6-82ca-ca355311b358 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Terminating instance [ 1175.749450] env[62627]: DEBUG oslo_vmware.rw_handles [None req-894b7972-03f7-44d8-a097-d38b3798344b tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5263e269-e733-6e53-324b-6fb02b7e543e/disk-0.vmdk. 
{{(pid=62627) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1175.750655] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-261ed719-770c-4458-9da4-0524fb0d12d5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.757906] env[62627]: DEBUG oslo_vmware.rw_handles [None req-894b7972-03f7-44d8-a097-d38b3798344b tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5263e269-e733-6e53-324b-6fb02b7e543e/disk-0.vmdk is in state: ready. {{(pid=62627) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1175.758085] env[62627]: ERROR oslo_vmware.rw_handles [None req-894b7972-03f7-44d8-a097-d38b3798344b tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5263e269-e733-6e53-324b-6fb02b7e543e/disk-0.vmdk due to incomplete transfer. [ 1175.758652] env[62627]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-521c07e7-3463-420a-b1e5-ddf59cf77c75 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.766235] env[62627]: DEBUG oslo_vmware.rw_handles [None req-894b7972-03f7-44d8-a097-d38b3798344b tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5263e269-e733-6e53-324b-6fb02b7e543e/disk-0.vmdk. {{(pid=62627) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1175.766443] env[62627]: DEBUG nova.virt.vmwareapi.images [None req-894b7972-03f7-44d8-a097-d38b3798344b tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Uploaded image 17aa2842-af5e-4093-b978-951f09730de0 to the Glance image server {{(pid=62627) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1175.768819] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-894b7972-03f7-44d8-a097-d38b3798344b tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Destroying the VM {{(pid=62627) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1175.770054] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-34ed0812-4ffa-4736-957e-9aeb63c913e3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.778297] env[62627]: DEBUG oslo_vmware.api [None req-894b7972-03f7-44d8-a097-d38b3798344b tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1175.778297] env[62627]: value = "task-2195118" [ 1175.778297] env[62627]: _type = "Task" [ 1175.778297] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1175.790081] env[62627]: DEBUG oslo_vmware.api [None req-894b7972-03f7-44d8-a097-d38b3798344b tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195118, 'name': Destroy_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.975486] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Creating Snapshot of the VM instance {{(pid=62627) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1175.975976] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-a01f4269-05da-41de-adf1-ef64f7f4f996 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.983836] env[62627]: DEBUG oslo_vmware.api [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Waiting for the task: (returnval){ [ 1175.983836] env[62627]: value = "task-2195119" [ 1175.983836] env[62627]: _type = "Task" [ 1175.983836] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1175.996385] env[62627]: DEBUG oslo_vmware.api [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195119, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.004560] env[62627]: DEBUG nova.scheduler.client.report [None req-01508733-a48d-4853-b07d-ddc7d1054d51 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1176.077727] env[62627]: DEBUG oslo_vmware.api [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Task: {'id': task-2195117, 'name': PowerOnVM_Task, 'duration_secs': 0.727478} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1176.078125] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1176.078425] env[62627]: INFO nova.compute.manager [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Took 8.27 seconds to spawn the instance on the hypervisor. [ 1176.078722] env[62627]: DEBUG nova.compute.manager [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1176.079811] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7db2af75-336e-4494-814a-87010869ea54 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.085334] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4f4c2876-2d6e-46b9-b575-053f9a6420b0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Acquiring lock "3001147e-7be3-4f72-b330-8f99bf7cf3ea" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1176.085681] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4f4c2876-2d6e-46b9-b575-053f9a6420b0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lock "3001147e-7be3-4f72-b330-8f99bf7cf3ea" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1176.085973] env[62627]: INFO nova.compute.manager [None req-4f4c2876-2d6e-46b9-b575-053f9a6420b0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] Attaching volume 98c96310-44b1-47ab-9c71-3099818174f5 to /dev/sdb [ 1176.127760] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8ad87de-c542-4faf-aefc-941ab84a21fd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.138440] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2831626-f001-4bf1-9db9-6a7e2625da30 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.156903] env[62627]: DEBUG nova.virt.block_device [None req-4f4c2876-2d6e-46b9-b575-053f9a6420b0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] Updating existing volume attachment record: f7b2f37c-0e82-4fab-9b1d-74b9b00116a8 {{(pid=62627) _volume_attach 
/opt/stack/nova/nova/virt/block_device.py:666}} [ 1176.238412] env[62627]: DEBUG nova.compute.manager [None req-17fe70c8-1277-40a6-82ca-ca355311b358 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Start destroying the instance on the hypervisor. {{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1176.238746] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-17fe70c8-1277-40a6-82ca-ca355311b358 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1176.239839] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1c3666e-a443-4f38-a43c-34bf89ade652 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.249650] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-17fe70c8-1277-40a6-82ca-ca355311b358 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1176.249786] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-50024828-b03b-4379-abcb-ae28ac8af186 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.257950] env[62627]: DEBUG oslo_vmware.api [None req-17fe70c8-1277-40a6-82ca-ca355311b358 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for the task: (returnval){ [ 1176.257950] env[62627]: value = "task-2195120" [ 1176.257950] env[62627]: _type = "Task" [ 1176.257950] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1176.266783] env[62627]: DEBUG oslo_vmware.api [None req-17fe70c8-1277-40a6-82ca-ca355311b358 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2195120, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.280129] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquiring lock "c7e3246d-ffe7-4d81-a2ec-c5878649ed6b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1176.280365] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lock "c7e3246d-ffe7-4d81-a2ec-c5878649ed6b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1176.290656] env[62627]: DEBUG oslo_vmware.api [None req-894b7972-03f7-44d8-a097-d38b3798344b tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195118, 'name': Destroy_Task} progress is 33%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.494537] env[62627]: DEBUG oslo_vmware.api [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195119, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.510711] env[62627]: DEBUG oslo_concurrency.lockutils [None req-01508733-a48d-4853-b07d-ddc7d1054d51 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.766s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1176.513316] env[62627]: DEBUG oslo_concurrency.lockutils [None req-07afeed6-75ad-4f85-8c3f-ff7613a1d38f tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 3.710s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1176.542462] env[62627]: INFO nova.scheduler.client.report [None req-01508733-a48d-4853-b07d-ddc7d1054d51 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Deleted allocations for instance abb14e3e-b521-4f21-bc94-6bbb66fbee57 [ 1176.606816] env[62627]: INFO nova.compute.manager [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Took 17.43 seconds to build instance. [ 1176.769600] env[62627]: DEBUG oslo_vmware.api [None req-17fe70c8-1277-40a6-82ca-ca355311b358 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2195120, 'name': PowerOffVM_Task, 'duration_secs': 0.293916} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1176.772237] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-17fe70c8-1277-40a6-82ca-ca355311b358 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1176.772237] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-17fe70c8-1277-40a6-82ca-ca355311b358 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1176.772237] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-68cee7e6-d70c-4022-a1c3-3a54cbb6bd57 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.785483] env[62627]: DEBUG nova.compute.manager [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1176.795432] env[62627]: DEBUG oslo_vmware.api [None req-894b7972-03f7-44d8-a097-d38b3798344b tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195118, 'name': Destroy_Task, 'duration_secs': 0.704476} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1176.795705] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-894b7972-03f7-44d8-a097-d38b3798344b tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Destroyed the VM [ 1176.795976] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-894b7972-03f7-44d8-a097-d38b3798344b tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Deleting Snapshot of the VM instance {{(pid=62627) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1176.796242] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-4ce7d248-fbed-4348-b944-0c217e5a71b4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.804354] env[62627]: DEBUG oslo_vmware.api [None req-894b7972-03f7-44d8-a097-d38b3798344b tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1176.804354] env[62627]: value = "task-2195125" [ 1176.804354] env[62627]: _type = "Task" [ 1176.804354] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1176.835525] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-17fe70c8-1277-40a6-82ca-ca355311b358 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1176.835754] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-17fe70c8-1277-40a6-82ca-ca355311b358 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1176.835936] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-17fe70c8-1277-40a6-82ca-ca355311b358 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Deleting the datastore file [datastore2] 241311b0-ba81-41d1-a9b6-cd40d04dbe3c {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1176.836220] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8f8ffa54-895b-4c0e-8921-3bfee79ec8e6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.845429] env[62627]: DEBUG oslo_vmware.api [None req-17fe70c8-1277-40a6-82ca-ca355311b358 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for the task: (returnval){ [ 1176.845429] env[62627]: value = "task-2195126" [ 1176.845429] env[62627]: _type = "Task" [ 1176.845429] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1176.857182] env[62627]: DEBUG oslo_vmware.api [None req-17fe70c8-1277-40a6-82ca-ca355311b358 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2195126, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.995231] env[62627]: DEBUG nova.compute.manager [req-d6d89964-804e-4c77-a5f8-b141a40b1fa3 req-5b399d3f-7247-460e-94ec-58f9d0c54671 service nova] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Received event network-changed-438ff9e4-843e-4e29-b42f-fd893752d160 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1176.995593] env[62627]: DEBUG nova.compute.manager [req-d6d89964-804e-4c77-a5f8-b141a40b1fa3 req-5b399d3f-7247-460e-94ec-58f9d0c54671 service nova] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Refreshing instance network info cache due to event network-changed-438ff9e4-843e-4e29-b42f-fd893752d160. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1176.995897] env[62627]: DEBUG oslo_concurrency.lockutils [req-d6d89964-804e-4c77-a5f8-b141a40b1fa3 req-5b399d3f-7247-460e-94ec-58f9d0c54671 service nova] Acquiring lock "refresh_cache-c25fe185-2d6a-40a8-be06-84ee22a5464e" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1176.996032] env[62627]: DEBUG oslo_concurrency.lockutils [req-d6d89964-804e-4c77-a5f8-b141a40b1fa3 req-5b399d3f-7247-460e-94ec-58f9d0c54671 service nova] Acquired lock "refresh_cache-c25fe185-2d6a-40a8-be06-84ee22a5464e" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1176.996206] env[62627]: DEBUG nova.network.neutron [req-d6d89964-804e-4c77-a5f8-b141a40b1fa3 req-5b399d3f-7247-460e-94ec-58f9d0c54671 service nova] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Refreshing network info cache for port 438ff9e4-843e-4e29-b42f-fd893752d160 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1177.001349] env[62627]: DEBUG oslo_vmware.api [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195119, 'name': CreateSnapshot_Task, 'duration_secs': 0.6805} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1177.001808] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Created Snapshot of the VM instance {{(pid=62627) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1177.002861] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec942a08-f9e9-4130-ae66-a3722118a8e4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.050211] env[62627]: DEBUG oslo_concurrency.lockutils [None req-01508733-a48d-4853-b07d-ddc7d1054d51 tempest-DeleteServersTestJSON-1791757005 tempest-DeleteServersTestJSON-1791757005-project-member] Lock "abb14e3e-b521-4f21-bc94-6bbb66fbee57" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.439s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1177.108953] env[62627]: DEBUG oslo_concurrency.lockutils [None req-67891c76-0c9b-4295-abc4-75106dd40cfd tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Lock "c25fe185-2d6a-40a8-be06-84ee22a5464e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.942s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1177.191414] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77a2cd67-3112-4cea-a58f-645e0afc946b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.200100] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe208afd-e291-49ab-9187-91dc68bbbbd1 
{{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.232669] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcf0e02d-5923-46ae-a01e-324e58514f72 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.242031] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aebf9f34-0152-4613-9a9d-e3f45202acc3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.258212] env[62627]: DEBUG nova.compute.provider_tree [None req-07afeed6-75ad-4f85-8c3f-ff7613a1d38f tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1177.311139] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1177.314782] env[62627]: DEBUG oslo_vmware.api [None req-894b7972-03f7-44d8-a097-d38b3798344b tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195125, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.355875] env[62627]: DEBUG oslo_vmware.api [None req-17fe70c8-1277-40a6-82ca-ca355311b358 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Task: {'id': task-2195126, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.308562} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1177.356174] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-17fe70c8-1277-40a6-82ca-ca355311b358 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1177.356383] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-17fe70c8-1277-40a6-82ca-ca355311b358 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1177.356678] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-17fe70c8-1277-40a6-82ca-ca355311b358 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1177.356933] env[62627]: INFO nova.compute.manager [None req-17fe70c8-1277-40a6-82ca-ca355311b358 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1177.357351] env[62627]: DEBUG oslo.service.loopingcall [None req-17fe70c8-1277-40a6-82ca-ca355311b358 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1177.357612] env[62627]: DEBUG nova.compute.manager [-] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1177.357775] env[62627]: DEBUG nova.network.neutron [-] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1177.523017] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Creating linked-clone VM from snapshot {{(pid=62627) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1177.523734] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-a3cdec46-357b-467b-b312-67b936dd20f8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.535604] env[62627]: DEBUG oslo_vmware.api [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Waiting for the task: (returnval){ [ 1177.535604] env[62627]: value = "task-2195127" [ 1177.535604] env[62627]: _type = "Task" [ 1177.535604] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1177.547846] env[62627]: DEBUG oslo_vmware.api [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195127, 'name': CloneVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.721101] env[62627]: DEBUG nova.compute.manager [req-a0f67787-b5c4-4d85-8324-66945da702d1 req-a68dd3d0-65cd-4f11-9891-e254cabde8ee service nova] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Received event network-vif-deleted-ddd3e21b-1460-4da2-bfcc-258303d2719e {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1177.721258] env[62627]: INFO nova.compute.manager [req-a0f67787-b5c4-4d85-8324-66945da702d1 req-a68dd3d0-65cd-4f11-9891-e254cabde8ee service nova] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Neutron deleted interface ddd3e21b-1460-4da2-bfcc-258303d2719e; detaching it from the instance and deleting it from the info cache [ 1177.721420] env[62627]: DEBUG nova.network.neutron [req-a0f67787-b5c4-4d85-8324-66945da702d1 req-a68dd3d0-65cd-4f11-9891-e254cabde8ee service nova] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1177.761893] env[62627]: DEBUG nova.scheduler.client.report [None req-07afeed6-75ad-4f85-8c3f-ff7613a1d38f tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1177.818204] env[62627]: DEBUG oslo_vmware.api [None req-894b7972-03f7-44d8-a097-d38b3798344b tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195125, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.819270] env[62627]: DEBUG nova.network.neutron [req-d6d89964-804e-4c77-a5f8-b141a40b1fa3 req-5b399d3f-7247-460e-94ec-58f9d0c54671 service nova] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Updated VIF entry in instance network info cache for port 438ff9e4-843e-4e29-b42f-fd893752d160. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1177.819637] env[62627]: DEBUG nova.network.neutron [req-d6d89964-804e-4c77-a5f8-b141a40b1fa3 req-5b399d3f-7247-460e-94ec-58f9d0c54671 service nova] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Updating instance_info_cache with network_info: [{"id": "438ff9e4-843e-4e29-b42f-fd893752d160", "address": "fa:16:3e:8c:f6:9e", "network": {"id": "df8b542a-cb92-4195-a818-4e830c2b65b9", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1973701137-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.206", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9b8ab3c165b34e418807f39bc01abf16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8459aaf-d6a8-46fb-ad14-464ac3104695", "external-id": "nsx-vlan-transportzone-46", "segmentation_id": 46, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap438ff9e4-84", "ovs_interfaceid": "438ff9e4-843e-4e29-b42f-fd893752d160", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1178.046063] env[62627]: DEBUG oslo_vmware.api [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195127, 'name': CloneVM_Task} progress is 94%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.194286] env[62627]: DEBUG nova.network.neutron [-] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1178.224690] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-90e108f3-b71b-4597-9647-fd20c2407f00 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.236794] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8602aeb-f83a-4124-974f-d6630defedab {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.023904] env[62627]: DEBUG oslo_concurrency.lockutils [req-d6d89964-804e-4c77-a5f8-b141a40b1fa3 req-5b399d3f-7247-460e-94ec-58f9d0c54671 service nova] Releasing lock "refresh_cache-c25fe185-2d6a-40a8-be06-84ee22a5464e" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1179.024461] env[62627]: INFO nova.compute.manager [-] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Took 1.67 seconds to deallocate network for instance. 
[ 1179.025138] env[62627]: DEBUG nova.compute.manager [req-a0f67787-b5c4-4d85-8324-66945da702d1 req-a68dd3d0-65cd-4f11-9891-e254cabde8ee service nova] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Detach interface failed, port_id=ddd3e21b-1460-4da2-bfcc-258303d2719e, reason: Instance 241311b0-ba81-41d1-a9b6-cd40d04dbe3c could not be found. {{(pid=62627) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1179.042563] env[62627]: DEBUG oslo_vmware.api [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195127, 'name': CloneVM_Task} progress is 100%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.042860] env[62627]: DEBUG oslo_vmware.api [None req-894b7972-03f7-44d8-a097-d38b3798344b tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195125, 'name': RemoveSnapshot_Task, 'duration_secs': 1.015471} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1179.043336] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-894b7972-03f7-44d8-a097-d38b3798344b tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Deleted Snapshot of the VM instance {{(pid=62627) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1179.043580] env[62627]: INFO nova.compute.manager [None req-894b7972-03f7-44d8-a097-d38b3798344b tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Took 16.13 seconds to snapshot the instance on the hypervisor. 
[ 1179.244739] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a74b98a4-a68c-4b3b-8678-651c072b04d8 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquiring lock "f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1179.245066] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a74b98a4-a68c-4b3b-8678-651c072b04d8 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1179.529436] env[62627]: DEBUG oslo_concurrency.lockutils [None req-07afeed6-75ad-4f85-8c3f-ff7613a1d38f tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 3.016s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1179.532111] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.221s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1179.533544] env[62627]: INFO nova.compute.claims [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1179.541275] env[62627]: DEBUG oslo_vmware.api [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195127, 'name': CloneVM_Task, 'duration_secs': 1.538031} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1179.541506] env[62627]: INFO nova.virt.vmwareapi.vmops [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Created linked-clone VM from snapshot [ 1179.542234] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e290582f-f243-4a8b-9d2f-eacc0ffd0527 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.545249] env[62627]: DEBUG oslo_concurrency.lockutils [None req-17fe70c8-1277-40a6-82ca-ca355311b358 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1179.552021] env[62627]: DEBUG nova.virt.vmwareapi.images [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Uploading image 0d9bb31a-1d73-47ee-a566-e579a137eff3 {{(pid=62627) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1179.576109] env[62627]: DEBUG oslo_vmware.rw_handles [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1179.576109] env[62627]: value = "vm-447801" [ 1179.576109] env[62627]: _type = "VirtualMachine" [ 1179.576109] env[62627]: }. {{(pid=62627) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1179.576375] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-18316da8-79ad-469b-9708-cc897dd60331 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.583455] env[62627]: DEBUG oslo_vmware.rw_handles [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Lease: (returnval){ [ 1179.583455] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52777f6d-6a09-5b8e-9ca5-797db24b8d3d" [ 1179.583455] env[62627]: _type = "HttpNfcLease" [ 1179.583455] env[62627]: } obtained for exporting VM: (result){ [ 1179.583455] env[62627]: value = "vm-447801" [ 1179.583455] env[62627]: _type = "VirtualMachine" [ 1179.583455] env[62627]: }. {{(pid=62627) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1179.583741] env[62627]: DEBUG oslo_vmware.api [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Waiting for the lease: (returnval){ [ 1179.583741] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52777f6d-6a09-5b8e-9ca5-797db24b8d3d" [ 1179.583741] env[62627]: _type = "HttpNfcLease" [ 1179.583741] env[62627]: } to be ready. 
{{(pid=62627) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1179.591120] env[62627]: DEBUG nova.compute.manager [None req-894b7972-03f7-44d8-a097-d38b3798344b tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Found 3 images (rotation: 2) {{(pid=62627) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 1179.591308] env[62627]: DEBUG nova.compute.manager [None req-894b7972-03f7-44d8-a097-d38b3798344b tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Rotating out 1 backups {{(pid=62627) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5025}} [ 1179.591471] env[62627]: DEBUG nova.compute.manager [None req-894b7972-03f7-44d8-a097-d38b3798344b tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Deleting image 092e7cd9-297b-4a2d-92b6-29f1a70c0b73 {{(pid=62627) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5030}} [ 1179.594420] env[62627]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1179.594420] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52777f6d-6a09-5b8e-9ca5-797db24b8d3d" [ 1179.594420] env[62627]: _type = "HttpNfcLease" [ 1179.594420] env[62627]: } is initializing. {{(pid=62627) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1179.749217] env[62627]: DEBUG nova.compute.utils [None req-a74b98a4-a68c-4b3b-8678-651c072b04d8 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1180.094077] env[62627]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1180.094077] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52777f6d-6a09-5b8e-9ca5-797db24b8d3d" [ 1180.094077] env[62627]: _type = "HttpNfcLease" [ 1180.094077] env[62627]: } is ready. {{(pid=62627) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1180.095080] env[62627]: INFO nova.scheduler.client.report [None req-07afeed6-75ad-4f85-8c3f-ff7613a1d38f tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Deleted allocation for migration 06213fc3-8b9e-48ce-841a-c124a8ef7acb [ 1180.096259] env[62627]: DEBUG oslo_vmware.rw_handles [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1180.096259] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52777f6d-6a09-5b8e-9ca5-797db24b8d3d" [ 1180.096259] env[62627]: _type = "HttpNfcLease" [ 1180.096259] env[62627]: }. 
{{(pid=62627) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1180.099320] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6567e25a-fa53-4ac9-bf39-e2d58e36d260 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.108087] env[62627]: DEBUG oslo_vmware.rw_handles [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/526d52c8-5303-48c2-125d-b4a72a243f3c/disk-0.vmdk from lease info. {{(pid=62627) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1180.108273] env[62627]: DEBUG oslo_vmware.rw_handles [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/526d52c8-5303-48c2-125d-b4a72a243f3c/disk-0.vmdk for reading. {{(pid=62627) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1180.198895] env[62627]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-59730194-1646-4a6e-9f2f-1c39466a5e8c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.251682] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a74b98a4-a68c-4b3b-8678-651c072b04d8 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1180.603889] env[62627]: DEBUG oslo_concurrency.lockutils [None req-07afeed6-75ad-4f85-8c3f-ff7613a1d38f tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "634d547b-3b5e-4185-8229-daf055ca6e69" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 11.236s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1180.716799] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0de9aab-3452-4a39-bc5f-002388d1b6ad {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.732227] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cac04871-f5cf-46f2-b5f4-9ad388418e9e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.764543] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07fb6fdf-e228-4b61-bc43-b04c205a1c8f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.773084] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12886a8b-f47c-430e-99b6-3e0e2e3ba62f {{(pid=62627) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.788147] env[62627]: DEBUG nova.compute.provider_tree [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1181.215645] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-4f4c2876-2d6e-46b9-b575-053f9a6420b0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] Volume attach. Driver type: vmdk {{(pid=62627) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1181.215906] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-4f4c2876-2d6e-46b9-b575-053f9a6420b0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447800', 'volume_id': '98c96310-44b1-47ab-9c71-3099818174f5', 'name': 'volume-98c96310-44b1-47ab-9c71-3099818174f5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '3001147e-7be3-4f72-b330-8f99bf7cf3ea', 'attached_at': '', 'detached_at': '', 'volume_id': '98c96310-44b1-47ab-9c71-3099818174f5', 'serial': '98c96310-44b1-47ab-9c71-3099818174f5'} {{(pid=62627) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1181.216883] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fbe1078-d622-468c-a676-8b39d4bda966 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.247949] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fa249f1-4696-4505-80f8-11fe95597dd8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.284609] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-4f4c2876-2d6e-46b9-b575-053f9a6420b0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] Reconfiguring VM instance instance-00000059 to attach disk [datastore2] volume-98c96310-44b1-47ab-9c71-3099818174f5/volume-98c96310-44b1-47ab-9c71-3099818174f5.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1181.285132] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8366821f-839a-4f3b-a9e3-cea50a1d4781 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.303424] env[62627]: DEBUG nova.scheduler.client.report [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 
512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1181.316934] env[62627]: DEBUG oslo_vmware.api [None req-4f4c2876-2d6e-46b9-b575-053f9a6420b0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Waiting for the task: (returnval){ [ 1181.316934] env[62627]: value = "task-2195131" [ 1181.316934] env[62627]: _type = "Task" [ 1181.316934] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.328880] env[62627]: DEBUG oslo_vmware.api [None req-4f4c2876-2d6e-46b9-b575-053f9a6420b0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195131, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.341351] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a74b98a4-a68c-4b3b-8678-651c072b04d8 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquiring lock "f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1181.341351] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a74b98a4-a68c-4b3b-8678-651c072b04d8 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1181.341351] env[62627]: INFO nova.compute.manager [None req-a74b98a4-a68c-4b3b-8678-651c072b04d8 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Attaching volume 4c7ed974-1052-4c7c-aece-d94f225c352b to /dev/sdb [ 1181.391630] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a569d4f9-29fb-47d8-beb4-d7bfb97dda52 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.400067] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-525c62c5-8519-4f31-88fb-683ccedf69b5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.414956] env[62627]: DEBUG nova.virt.block_device [None req-a74b98a4-a68c-4b3b-8678-651c072b04d8 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Updating existing volume attachment record: a7bd880f-93b9-429b-a894-39dceb1206c1 {{(pid=62627) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1181.810918] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.278s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1181.811110] env[62627]: DEBUG nova.compute.manager [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1181.817181] env[62627]: DEBUG oslo_concurrency.lockutils [None req-17fe70c8-1277-40a6-82ca-ca355311b358 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.269s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1181.817181] env[62627]: DEBUG nova.objects.instance [None req-17fe70c8-1277-40a6-82ca-ca355311b358 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Lazy-loading 'resources' on Instance uuid 241311b0-ba81-41d1-a9b6-cd40d04dbe3c {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1181.818315] env[62627]: DEBUG oslo_concurrency.lockutils [None req-33a42480-de6e-438a-8889-ff027af2f5ae tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquiring lock "d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1181.818315] env[62627]: DEBUG oslo_concurrency.lockutils [None req-33a42480-de6e-438a-8889-ff027af2f5ae tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lock "d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1181.818315] env[62627]: DEBUG nova.compute.manager [None req-33a42480-de6e-438a-8889-ff027af2f5ae tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1181.819721] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ff2cf13-e5d7-451d-afb4-0d0dba98b97b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.831768] env[62627]: DEBUG nova.compute.manager [None req-33a42480-de6e-438a-8889-ff027af2f5ae tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62627) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1181.832456] env[62627]: DEBUG nova.objects.instance [None req-33a42480-de6e-438a-8889-ff027af2f5ae tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] 
Lazy-loading 'flavor' on Instance uuid d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1181.839021] env[62627]: DEBUG oslo_vmware.api [None req-4f4c2876-2d6e-46b9-b575-053f9a6420b0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195131, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.320057] env[62627]: DEBUG nova.compute.utils [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1182.322140] env[62627]: DEBUG nova.compute.manager [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Allocating IP information in the background. {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1182.323031] env[62627]: DEBUG nova.network.neutron [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1182.334448] env[62627]: DEBUG oslo_vmware.api [None req-4f4c2876-2d6e-46b9-b575-053f9a6420b0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195131, 'name': ReconfigVM_Task, 'duration_secs': 0.61107} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1182.338741] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-4f4c2876-2d6e-46b9-b575-053f9a6420b0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] Reconfigured VM instance instance-00000059 to attach disk [datastore2] volume-98c96310-44b1-47ab-9c71-3099818174f5/volume-98c96310-44b1-47ab-9c71-3099818174f5.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1182.344268] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bbf608fa-e7c9-4d77-b485-d26ebf99cbf8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.358831] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-33a42480-de6e-438a-8889-ff027af2f5ae tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1182.359439] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-943c58c3-0f2f-4db7-afd8-55cf3247ee07 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.368031] env[62627]: DEBUG oslo_vmware.api [None req-4f4c2876-2d6e-46b9-b575-053f9a6420b0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Waiting for the task: (returnval){ [ 1182.368031] env[62627]: value = "task-2195135" [ 1182.368031] env[62627]: _type = "Task" [ 1182.368031] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.369629] env[62627]: DEBUG oslo_vmware.api [None req-33a42480-de6e-438a-8889-ff027af2f5ae tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1182.369629] env[62627]: value = "task-2195136" [ 1182.369629] env[62627]: _type = "Task" [ 1182.369629] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.382821] env[62627]: DEBUG nova.policy [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3c5e5eee141e496db2d2ea316c43e6be', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8e4f7ad3ebcf4c4d97bdbce58eec5a09', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 1182.391098] env[62627]: DEBUG oslo_vmware.api [None req-4f4c2876-2d6e-46b9-b575-053f9a6420b0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195135, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.391098] env[62627]: DEBUG oslo_vmware.api [None req-33a42480-de6e-438a-8889-ff027af2f5ae tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195136, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.532105] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76593f3a-a8b7-4cc2-800a-2ff35defd7b8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.543038] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0f1d0dc-45b0-4c13-b392-aebd86a595f9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.575028] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-109fb806-29d9-4900-97cc-b886c6cc74ac {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.583590] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e976808-85cc-4e2c-b3b3-46c151d26df7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.598978] env[62627]: DEBUG nova.compute.provider_tree [None req-17fe70c8-1277-40a6-82ca-ca355311b358 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1182.786302] env[62627]: DEBUG oslo_concurrency.lockutils [None req-96daf845-dd40-4456-8ba3-78dae5c6a720 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquiring lock "634d547b-3b5e-4185-8229-daf055ca6e69" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1182.786901] env[62627]: DEBUG oslo_concurrency.lockutils [None req-96daf845-dd40-4456-8ba3-78dae5c6a720 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "634d547b-3b5e-4185-8229-daf055ca6e69" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1182.787073] env[62627]: DEBUG oslo_concurrency.lockutils [None req-96daf845-dd40-4456-8ba3-78dae5c6a720 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquiring lock "634d547b-3b5e-4185-8229-daf055ca6e69-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1182.787303] env[62627]: DEBUG oslo_concurrency.lockutils [None req-96daf845-dd40-4456-8ba3-78dae5c6a720 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock 
"634d547b-3b5e-4185-8229-daf055ca6e69-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1182.787492] env[62627]: DEBUG oslo_concurrency.lockutils [None req-96daf845-dd40-4456-8ba3-78dae5c6a720 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "634d547b-3b5e-4185-8229-daf055ca6e69-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1182.789982] env[62627]: INFO nova.compute.manager [None req-96daf845-dd40-4456-8ba3-78dae5c6a720 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Terminating instance [ 1182.828023] env[62627]: DEBUG nova.compute.manager [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1182.866982] env[62627]: DEBUG nova.network.neutron [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Successfully created port: 58815871-fdac-4406-a61e-4d7fca125122 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1182.881658] env[62627]: DEBUG oslo_vmware.api [None req-33a42480-de6e-438a-8889-ff027af2f5ae tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195136, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.884893] env[62627]: DEBUG oslo_vmware.api [None req-4f4c2876-2d6e-46b9-b575-053f9a6420b0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195135, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.105388] env[62627]: DEBUG nova.scheduler.client.report [None req-17fe70c8-1277-40a6-82ca-ca355311b358 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1183.294832] env[62627]: DEBUG nova.compute.manager [None req-96daf845-dd40-4456-8ba3-78dae5c6a720 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Start destroying the instance on the hypervisor. {{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1183.295155] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-96daf845-dd40-4456-8ba3-78dae5c6a720 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1183.296243] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12392c66-4073-468d-9f8a-3562e91b7d78 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.305614] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-96daf845-dd40-4456-8ba3-78dae5c6a720 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1183.305926] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f4181ba1-8a8d-4830-b17a-2e5d8924b3c4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.313421] env[62627]: DEBUG oslo_vmware.api [None req-96daf845-dd40-4456-8ba3-78dae5c6a720 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1183.313421] env[62627]: value = "task-2195137" [ 1183.313421] env[62627]: _type = "Task" [ 1183.313421] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1183.324201] env[62627]: DEBUG oslo_vmware.api [None req-96daf845-dd40-4456-8ba3-78dae5c6a720 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195137, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.385109] env[62627]: DEBUG oslo_vmware.api [None req-33a42480-de6e-438a-8889-ff027af2f5ae tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195136, 'name': PowerOffVM_Task, 'duration_secs': 0.739319} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1183.388948] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-33a42480-de6e-438a-8889-ff027af2f5ae tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1183.389451] env[62627]: DEBUG nova.compute.manager [None req-33a42480-de6e-438a-8889-ff027af2f5ae tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1183.389842] env[62627]: DEBUG oslo_vmware.api [None req-4f4c2876-2d6e-46b9-b575-053f9a6420b0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195135, 'name': ReconfigVM_Task, 'duration_secs': 0.682836} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1183.390590] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb3d1913-b216-43a4-8e21-b461fc23c031 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.393620] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-4f4c2876-2d6e-46b9-b575-053f9a6420b0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447800', 'volume_id': '98c96310-44b1-47ab-9c71-3099818174f5', 'name': 'volume-98c96310-44b1-47ab-9c71-3099818174f5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '3001147e-7be3-4f72-b330-8f99bf7cf3ea', 'attached_at': '', 'detached_at': '', 'volume_id': '98c96310-44b1-47ab-9c71-3099818174f5', 'serial': '98c96310-44b1-47ab-9c71-3099818174f5'} {{(pid=62627) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1183.506163] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquiring lock "36a8d4d3-d6b6-4b42-9514-0e29ee45aea9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1183.506409] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "36a8d4d3-d6b6-4b42-9514-0e29ee45aea9" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1183.608439] env[62627]: DEBUG oslo_concurrency.lockutils [None req-17fe70c8-1277-40a6-82ca-ca355311b358 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.794s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1183.644468] env[62627]: INFO nova.scheduler.client.report [None req-17fe70c8-1277-40a6-82ca-ca355311b358 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Deleted allocations for instance 241311b0-ba81-41d1-a9b6-cd40d04dbe3c [ 1183.825074] env[62627]: DEBUG oslo_vmware.api [None req-96daf845-dd40-4456-8ba3-78dae5c6a720 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195137, 'name': PowerOffVM_Task, 'duration_secs': 0.314845} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1183.825411] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-96daf845-dd40-4456-8ba3-78dae5c6a720 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1183.825574] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-96daf845-dd40-4456-8ba3-78dae5c6a720 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1183.825853] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-efa24d13-aabe-4905-ab62-970c8c4b92f4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.835989] env[62627]: DEBUG nova.compute.manager [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Start spawning the instance on the hypervisor. 
{{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1183.871608] env[62627]: DEBUG nova.virt.hardware [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1183.872469] env[62627]: DEBUG nova.virt.hardware [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1183.872591] env[62627]: DEBUG nova.virt.hardware [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1183.872722] env[62627]: DEBUG nova.virt.hardware [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1183.872938] env[62627]: DEBUG nova.virt.hardware [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1183.873021] env[62627]: DEBUG nova.virt.hardware [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1183.873264] env[62627]: DEBUG nova.virt.hardware [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1183.873483] env[62627]: DEBUG nova.virt.hardware [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1183.873703] env[62627]: DEBUG 
nova.virt.hardware [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1183.874060] env[62627]: DEBUG nova.virt.hardware [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1183.874060] env[62627]: DEBUG nova.virt.hardware [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1183.875094] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6efe6fc6-ac65-479e-beb7-7727245cfc19 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.885971] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cfa0428-6d54-4d63-88ce-7f057cc9983a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.913551] env[62627]: DEBUG oslo_concurrency.lockutils [None req-33a42480-de6e-438a-8889-ff027af2f5ae tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lock "d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.096s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1184.011149] env[62627]: DEBUG nova.compute.manager [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Starting instance... 
{{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1184.151970] env[62627]: DEBUG oslo_concurrency.lockutils [None req-17fe70c8-1277-40a6-82ca-ca355311b358 tempest-ServerRescueNegativeTestJSON-1339547319 tempest-ServerRescueNegativeTestJSON-1339547319-project-member] Lock "241311b0-ba81-41d1-a9b6-cd40d04dbe3c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.421s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1184.478469] env[62627]: DEBUG nova.objects.instance [None req-4f4c2876-2d6e-46b9-b575-053f9a6420b0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lazy-loading 'flavor' on Instance uuid 3001147e-7be3-4f72-b330-8f99bf7cf3ea {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1184.553020] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1184.553347] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1184.555118] env[62627]: INFO nova.compute.claims [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1184.873142] env[62627]: DEBUG nova.compute.manager [req-2f6cc1c8-7a83-47d6-92be-721653072647 req-2f7462af-f1a5-45cb-b936-ac324f72e3c5 service nova] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Received event network-vif-plugged-58815871-fdac-4406-a61e-4d7fca125122 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1184.873142] env[62627]: DEBUG oslo_concurrency.lockutils [req-2f6cc1c8-7a83-47d6-92be-721653072647 req-2f7462af-f1a5-45cb-b936-ac324f72e3c5 service nova] Acquiring lock "c7e3246d-ffe7-4d81-a2ec-c5878649ed6b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1184.873142] env[62627]: DEBUG oslo_concurrency.lockutils [req-2f6cc1c8-7a83-47d6-92be-721653072647 req-2f7462af-f1a5-45cb-b936-ac324f72e3c5 service nova] Lock "c7e3246d-ffe7-4d81-a2ec-c5878649ed6b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1184.873142] env[62627]: DEBUG oslo_concurrency.lockutils [req-2f6cc1c8-7a83-47d6-92be-721653072647 req-2f7462af-f1a5-45cb-b936-ac324f72e3c5 service nova] Lock "c7e3246d-ffe7-4d81-a2ec-c5878649ed6b-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1184.873142] env[62627]: DEBUG nova.compute.manager [req-2f6cc1c8-7a83-47d6-92be-721653072647 req-2f7462af-f1a5-45cb-b936-ac324f72e3c5 service nova] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] No waiting events found dispatching network-vif-plugged-58815871-fdac-4406-a61e-4d7fca125122 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1184.873142] env[62627]: WARNING nova.compute.manager [req-2f6cc1c8-7a83-47d6-92be-721653072647 req-2f7462af-f1a5-45cb-b936-ac324f72e3c5 service nova] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Received unexpected event network-vif-plugged-58815871-fdac-4406-a61e-4d7fca125122 for instance with vm_state building and task_state spawning. [ 1184.989040] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4f4c2876-2d6e-46b9-b575-053f9a6420b0 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lock "3001147e-7be3-4f72-b330-8f99bf7cf3ea" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.902s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1185.422190] env[62627]: DEBUG nova.network.neutron [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Successfully updated port: 58815871-fdac-4406-a61e-4d7fca125122 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1185.453722] env[62627]: DEBUG nova.compute.manager [req-3447b6b0-9799-44fb-94d6-c15b37691498 req-68c1dc21-e37c-4ad2-a28c-f829e0038dd8 service nova] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Received event network-changed-58815871-fdac-4406-a61e-4d7fca125122 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1185.453722] env[62627]: DEBUG nova.compute.manager [req-3447b6b0-9799-44fb-94d6-c15b37691498 req-68c1dc21-e37c-4ad2-a28c-f829e0038dd8 service nova] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Refreshing instance network info cache due to event network-changed-58815871-fdac-4406-a61e-4d7fca125122. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1185.453722] env[62627]: DEBUG oslo_concurrency.lockutils [req-3447b6b0-9799-44fb-94d6-c15b37691498 req-68c1dc21-e37c-4ad2-a28c-f829e0038dd8 service nova] Acquiring lock "refresh_cache-c7e3246d-ffe7-4d81-a2ec-c5878649ed6b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1185.453722] env[62627]: DEBUG oslo_concurrency.lockutils [req-3447b6b0-9799-44fb-94d6-c15b37691498 req-68c1dc21-e37c-4ad2-a28c-f829e0038dd8 service nova] Acquired lock "refresh_cache-c7e3246d-ffe7-4d81-a2ec-c5878649ed6b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1185.453722] env[62627]: DEBUG nova.network.neutron [req-3447b6b0-9799-44fb-94d6-c15b37691498 req-68c1dc21-e37c-4ad2-a28c-f829e0038dd8 service nova] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Refreshing network info cache for port 58815871-fdac-4406-a61e-4d7fca125122 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1185.582962] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-96daf845-dd40-4456-8ba3-78dae5c6a720 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1185.583422] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-96daf845-dd40-4456-8ba3-78dae5c6a720 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1185.583422] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-96daf845-dd40-4456-8ba3-78dae5c6a720 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Deleting the datastore file [datastore2] 634d547b-3b5e-4185-8229-daf055ca6e69 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1185.583748] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-609ebb0e-a0e3-496f-b12e-96642d922ffa {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.592926] env[62627]: DEBUG oslo_vmware.api [None req-96daf845-dd40-4456-8ba3-78dae5c6a720 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1185.592926] env[62627]: value = "task-2195140" [ 1185.592926] env[62627]: _type = "Task" [ 1185.592926] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.610799] env[62627]: DEBUG oslo_vmware.api [None req-96daf845-dd40-4456-8ba3-78dae5c6a720 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195140, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.656128] env[62627]: DEBUG nova.compute.manager [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Stashing vm_state: stopped {{(pid=62627) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1185.806681] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b25926f-1967-47c3-90c9-77de09986911 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.816524] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef20f18e-63a7-4819-88fe-ae2b2544932b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.853247] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef5bab5d-1305-4f58-bd03-871c728e154e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.860857] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e9041de-f976-4e47-a67d-930c5093bd0b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.876769] env[62627]: DEBUG nova.compute.provider_tree [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1185.927040] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquiring lock "refresh_cache-c7e3246d-ffe7-4d81-a2ec-c5878649ed6b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1185.976203] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-a74b98a4-a68c-4b3b-8678-651c072b04d8 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Volume attach. 
Driver type: vmdk {{(pid=62627) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1185.976517] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-a74b98a4-a68c-4b3b-8678-651c072b04d8 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447803', 'volume_id': '4c7ed974-1052-4c7c-aece-d94f225c352b', 'name': 'volume-4c7ed974-1052-4c7c-aece-d94f225c352b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1', 'attached_at': '', 'detached_at': '', 'volume_id': '4c7ed974-1052-4c7c-aece-d94f225c352b', 'serial': '4c7ed974-1052-4c7c-aece-d94f225c352b'} {{(pid=62627) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1185.977769] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb3e236a-0131-4563-a10c-86781f914eb4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.995456] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f98d063-2bb5-4d9c-81bf-f4d528ca8f11 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.030627] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-a74b98a4-a68c-4b3b-8678-651c072b04d8 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Reconfiguring VM instance instance-00000056 to attach disk [datastore2] volume-4c7ed974-1052-4c7c-aece-d94f225c352b/volume-4c7ed974-1052-4c7c-aece-d94f225c352b.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1186.031815] env[62627]: DEBUG nova.network.neutron [req-3447b6b0-9799-44fb-94d6-c15b37691498 req-68c1dc21-e37c-4ad2-a28c-f829e0038dd8 service nova] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1186.035147] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2fbcc1e1-f99a-41bb-a7d7-49020987a80b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.055950] env[62627]: DEBUG oslo_vmware.api [None req-a74b98a4-a68c-4b3b-8678-651c072b04d8 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1186.055950] env[62627]: value = "task-2195141" [ 1186.055950] env[62627]: _type = "Task" [ 1186.055950] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.065394] env[62627]: DEBUG oslo_vmware.api [None req-a74b98a4-a68c-4b3b-8678-651c072b04d8 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195141, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.104620] env[62627]: DEBUG oslo_vmware.api [None req-96daf845-dd40-4456-8ba3-78dae5c6a720 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195140, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.311963} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.105162] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-96daf845-dd40-4456-8ba3-78dae5c6a720 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1186.105592] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-96daf845-dd40-4456-8ba3-78dae5c6a720 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1186.105894] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-96daf845-dd40-4456-8ba3-78dae5c6a720 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1186.106224] env[62627]: INFO nova.compute.manager [None req-96daf845-dd40-4456-8ba3-78dae5c6a720 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Took 2.81 seconds to destroy the instance on the hypervisor. [ 1186.106503] env[62627]: DEBUG oslo.service.loopingcall [None req-96daf845-dd40-4456-8ba3-78dae5c6a720 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1186.109311] env[62627]: DEBUG nova.compute.manager [-] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1186.109545] env[62627]: DEBUG nova.network.neutron [-] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1186.183995] env[62627]: DEBUG oslo_concurrency.lockutils [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1186.243583] env[62627]: DEBUG nova.network.neutron [req-3447b6b0-9799-44fb-94d6-c15b37691498 req-68c1dc21-e37c-4ad2-a28c-f829e0038dd8 service nova] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1186.380332] env[62627]: DEBUG nova.scheduler.client.report [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1186.568649] env[62627]: DEBUG oslo_vmware.api [None req-a74b98a4-a68c-4b3b-8678-651c072b04d8 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195141, 'name': ReconfigVM_Task, 'duration_secs': 0.500164} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.571015] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-a74b98a4-a68c-4b3b-8678-651c072b04d8 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Reconfigured VM instance instance-00000056 to attach disk [datastore2] volume-4c7ed974-1052-4c7c-aece-d94f225c352b/volume-4c7ed974-1052-4c7c-aece-d94f225c352b.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1186.574425] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cc176c66-3581-4c11-b649-5df51136c5de {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.590401] env[62627]: DEBUG oslo_vmware.api [None req-a74b98a4-a68c-4b3b-8678-651c072b04d8 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1186.590401] env[62627]: value = "task-2195142" [ 1186.590401] env[62627]: _type = "Task" [ 1186.590401] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.600841] env[62627]: DEBUG oslo_vmware.api [None req-a74b98a4-a68c-4b3b-8678-651c072b04d8 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195142, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.746227] env[62627]: DEBUG oslo_concurrency.lockutils [req-3447b6b0-9799-44fb-94d6-c15b37691498 req-68c1dc21-e37c-4ad2-a28c-f829e0038dd8 service nova] Releasing lock "refresh_cache-c7e3246d-ffe7-4d81-a2ec-c5878649ed6b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1186.746706] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquired lock "refresh_cache-c7e3246d-ffe7-4d81-a2ec-c5878649ed6b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1186.746865] env[62627]: DEBUG nova.network.neutron [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1186.802608] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Acquiring lock "722d7bce-f0d7-4891-aa48-0ead10786a6b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1186.802914] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Lock 
"722d7bce-f0d7-4891-aa48-0ead10786a6b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1186.890793] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.337s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1186.893293] env[62627]: DEBUG nova.compute.manager [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1186.895453] env[62627]: DEBUG oslo_concurrency.lockutils [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.711s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1187.101566] env[62627]: DEBUG oslo_vmware.api [None req-a74b98a4-a68c-4b3b-8678-651c072b04d8 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195142, 'name': ReconfigVM_Task, 'duration_secs': 0.154523} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.102087] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-a74b98a4-a68c-4b3b-8678-651c072b04d8 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447803', 'volume_id': '4c7ed974-1052-4c7c-aece-d94f225c352b', 'name': 'volume-4c7ed974-1052-4c7c-aece-d94f225c352b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1', 'attached_at': '', 'detached_at': '', 'volume_id': '4c7ed974-1052-4c7c-aece-d94f225c352b', 'serial': '4c7ed974-1052-4c7c-aece-d94f225c352b'} {{(pid=62627) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1187.105744] env[62627]: DEBUG nova.network.neutron [-] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1187.208181] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Acquiring lock "30db4963-095b-4297-ad7c-0606451bd2b6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1187.208181] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lock "30db4963-095b-4297-ad7c-0606451bd2b6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1187.294377] env[62627]: DEBUG nova.network.neutron [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1187.306101] env[62627]: DEBUG nova.compute.manager [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] [instance: 722d7bce-f0d7-4891-aa48-0ead10786a6b] Starting instance... 
{{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1187.399942] env[62627]: DEBUG nova.compute.utils [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1187.402996] env[62627]: INFO nova.compute.claims [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1187.407750] env[62627]: DEBUG nova.compute.manager [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Allocating IP information in the background. {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1187.407750] env[62627]: DEBUG nova.network.neutron [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1187.481781] env[62627]: DEBUG nova.compute.manager [req-6028c3b3-f19c-4432-8cab-0ff5eaeae449 req-0b9623c4-5ff1-4a1a-97ad-66960cbea436 service nova] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Received event network-vif-deleted-9830a1ce-942e-4954-8513-f624e13d304c {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1187.506897] env[62627]: DEBUG nova.policy [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4289413aec40498dab5f6170d69eb39a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '74aed70194cf444884768a5c89fe8613', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 1187.612138] env[62627]: INFO nova.compute.manager [-] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Took 1.50 seconds to deallocate network for instance. 
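The recurring "Acquiring lock ... / Lock ... acquired ... :: waited Ns / Lock ... released ... :: held Ns" triplets throughout this trace are emitted by the wrapper in oslo.concurrency's lockutils (the inner/lock call sites cited in each message), not by Nova's own code. A minimal illustrative sketch of the two usage forms behind them, with lock names borrowed from the log purely as examples and a placeholder body rather than Nova's actual code:

    # Illustrative sketch only; the lock names and function body are examples.
    # lockutils' wrapper measures how long the caller waited for the named lock
    # and how long it was held, which is exactly what the trace records.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage():
        pass  # work performed while holding the named lock

    # The same primitive as a context manager, e.g. for the per-instance
    # "refresh_cache-<uuid>" locks seen above:
    with lockutils.lock('refresh_cache-c7e3246d-ffe7-4d81-a2ec-c5878649ed6b'):
        pass  # refresh the network info cache under the lock

The waited/held durations in each message come from that wrapper, which is why they appear even for locks held for only a fraction of a second.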
[ 1187.620167] env[62627]: DEBUG nova.network.neutron [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Updating instance_info_cache with network_info: [{"id": "58815871-fdac-4406-a61e-4d7fca125122", "address": "fa:16:3e:69:fc:43", "network": {"id": "a1c5c227-8f1f-4881-a6d8-f446b64d9846", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1052922661-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e4f7ad3ebcf4c4d97bdbce58eec5a09", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "023d6500-887e-4dc4-bec5-06b40450d9c0", "external-id": "nsx-vlan-transportzone-108", "segmentation_id": 108, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58815871-fd", "ovs_interfaceid": "58815871-fdac-4406-a61e-4d7fca125122", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1187.689719] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquiring lock "b68042fd-4955-41e4-b14f-1f8268ace377" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1187.689719] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lock "b68042fd-4955-41e4-b14f-1f8268ace377" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1187.708174] env[62627]: DEBUG nova.compute.manager [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 30db4963-095b-4297-ad7c-0606451bd2b6] Starting instance... 
{{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1187.836132] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1187.908570] env[62627]: DEBUG nova.compute.manager [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1187.912644] env[62627]: INFO nova.compute.resource_tracker [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Updating resource usage from migration 41203292-9d7e-4bc8-8a03-a07c60121f37 [ 1187.989676] env[62627]: DEBUG nova.network.neutron [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Successfully created port: d885a4e0-fbdd-41cf-9e49-1e16cd392ecf {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1188.124630] env[62627]: DEBUG oslo_concurrency.lockutils [None req-96daf845-dd40-4456-8ba3-78dae5c6a720 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1188.125327] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Releasing lock "refresh_cache-c7e3246d-ffe7-4d81-a2ec-c5878649ed6b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1188.125608] env[62627]: DEBUG nova.compute.manager [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Instance network_info: |[{"id": "58815871-fdac-4406-a61e-4d7fca125122", "address": "fa:16:3e:69:fc:43", "network": {"id": "a1c5c227-8f1f-4881-a6d8-f446b64d9846", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1052922661-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e4f7ad3ebcf4c4d97bdbce58eec5a09", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "023d6500-887e-4dc4-bec5-06b40450d9c0", "external-id": 
"nsx-vlan-transportzone-108", "segmentation_id": 108, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58815871-fd", "ovs_interfaceid": "58815871-fdac-4406-a61e-4d7fca125122", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1188.126655] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:69:fc:43', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '023d6500-887e-4dc4-bec5-06b40450d9c0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '58815871-fdac-4406-a61e-4d7fca125122', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1188.134536] env[62627]: DEBUG oslo.service.loopingcall [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1188.135719] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1188.136510] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33a3dbaa-7f52-40a3-b5e3-c610b300489f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.140608] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a22d0135-aa48-4eca-9f18-f64a0e9c1552 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.161345] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd6c3e0b-d147-434a-a630-ca191aef6514 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.165701] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1188.165701] env[62627]: value = "task-2195143" [ 1188.165701] env[62627]: _type = "Task" [ 1188.165701] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.201161] env[62627]: DEBUG nova.objects.instance [None req-a74b98a4-a68c-4b3b-8678-651c072b04d8 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lazy-loading 'flavor' on Instance uuid f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1188.202738] env[62627]: DEBUG nova.compute.manager [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Starting instance... 
{{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1188.209040] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77ff4d39-e62b-4556-89e9-9d0ad2dab3a2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.218298] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195143, 'name': CreateVM_Task} progress is 25%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.227225] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85087102-17e2-43f8-9c12-2ace0ce00fb9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.243526] env[62627]: DEBUG nova.compute.provider_tree [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1188.245944] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1188.509728] env[62627]: DEBUG oslo_vmware.rw_handles [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/526d52c8-5303-48c2-125d-b4a72a243f3c/disk-0.vmdk. {{(pid=62627) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1188.510764] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71539deb-beb1-45a4-958d-f92a1802f87f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.519358] env[62627]: DEBUG oslo_vmware.rw_handles [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/526d52c8-5303-48c2-125d-b4a72a243f3c/disk-0.vmdk is in state: ready. {{(pid=62627) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1188.519422] env[62627]: ERROR oslo_vmware.rw_handles [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/526d52c8-5303-48c2-125d-b4a72a243f3c/disk-0.vmdk due to incomplete transfer. 
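The PowerOffVM_Task, ReconfigVM_Task, CreateVM_Task and Destroy_Task sequences in this trace all follow the same oslo.vmware pattern: invoke a vSphere *_Task method, receive a Task managed-object reference, then poll it until completion (the "Waiting for the task ... / progress is N% / completed successfully" lines). A hedged sketch of that pattern, where `session` and `vm_ref` are placeholders rather than the deployment's real objects:

    # Sketch of the invoke-then-poll pattern behind the task lines above;
    # not Nova's actual driver code. `session` is assumed to be an
    # authenticated oslo_vmware.api.VMwareAPISession and `vm_ref` a
    # VirtualMachine managed-object reference obtained elsewhere.
    from oslo_vmware import api

    def power_off(session: api.VMwareAPISession, vm_ref):
        """Invoke PowerOffVM_Task on vm_ref and block until it completes."""
        # invoke_api() returns the Task moref immediately; wait_for_task()
        # polls it (the "progress is N%" messages) and raises if the task
        # ends in an error state.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        return session.wait_for_task(task)

In the trace the same pair shows up as nova's vm_util.power_off_instance issuing the request and oslo_vmware.api's wait_for_task/_poll_task reporting progress and completion.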
[ 1188.519665] env[62627]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-df6f681c-8f79-4424-adb3-b613df799aa7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.526432] env[62627]: DEBUG oslo_vmware.rw_handles [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/526d52c8-5303-48c2-125d-b4a72a243f3c/disk-0.vmdk. {{(pid=62627) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1188.526674] env[62627]: DEBUG nova.virt.vmwareapi.images [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Uploaded image 0d9bb31a-1d73-47ee-a566-e579a137eff3 to the Glance image server {{(pid=62627) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1188.529036] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Destroying the VM {{(pid=62627) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1188.529354] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-fbf5cc5e-a1cb-475e-8ce1-e997812afc41 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.534806] env[62627]: DEBUG oslo_vmware.api [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Waiting for the task: (returnval){ [ 1188.534806] env[62627]: value = "task-2195144" [ 1188.534806] env[62627]: _type = "Task" [ 1188.534806] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.542577] env[62627]: DEBUG oslo_vmware.api [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195144, 'name': Destroy_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.678739] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195143, 'name': CreateVM_Task, 'duration_secs': 0.336666} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.679145] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1188.679588] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1188.679758] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1188.680219] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1188.680497] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bb68ce5b-674c-4167-a309-a7ae35eae675 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.685076] env[62627]: DEBUG oslo_vmware.api [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Waiting for the task: (returnval){ [ 1188.685076] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52482137-e2d8-955c-cbd0-23cc78a44c98" [ 1188.685076] env[62627]: _type = "Task" [ 1188.685076] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.692628] env[62627]: DEBUG oslo_vmware.api [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52482137-e2d8-955c-cbd0-23cc78a44c98, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.709087] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a74b98a4-a68c-4b3b-8678-651c072b04d8 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.368s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1188.726355] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1188.748156] env[62627]: DEBUG nova.scheduler.client.report [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1188.925603] env[62627]: DEBUG nova.compute.manager [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Start spawning the instance on the hypervisor. 
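The inventory payload logged above is what bounds scheduling on this provider: for each resource class the usable capacity is (total - reserved) * allocation_ratio, and max_unit caps what a single allocation may request. A quick check of the numbers reported for provider c17e7f32-68d8-4e2f-8979-15d701de02f7:

    # Usable capacity implied by the inventory in the log entry above.
    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0, "max_unit": 16},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0, "max_unit": 65530},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0, "max_unit": 177},
    }
    for rc, inv in inventory.items():
        usable = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(f"{rc}: {usable:g} schedulable in total, at most {inv['max_unit']} per allocation")
    # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400
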
{{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1188.946744] env[62627]: DEBUG nova.virt.hardware [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1188.946985] env[62627]: DEBUG nova.virt.hardware [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1188.947166] env[62627]: DEBUG nova.virt.hardware [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1188.947351] env[62627]: DEBUG nova.virt.hardware [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1188.947576] env[62627]: DEBUG nova.virt.hardware [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1188.947738] env[62627]: DEBUG nova.virt.hardware [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1188.947941] env[62627]: DEBUG nova.virt.hardware [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1188.948112] env[62627]: DEBUG nova.virt.hardware [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1188.948280] 
env[62627]: DEBUG nova.virt.hardware [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1188.948464] env[62627]: DEBUG nova.virt.hardware [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1188.948647] env[62627]: DEBUG nova.virt.hardware [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1188.949532] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-400eb6c9-ba3a-4de9-8ea6-597922ce0e75 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.957288] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43c5e607-e1a6-4cda-9c39-8080d7469502 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.044586] env[62627]: DEBUG oslo_vmware.api [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195144, 'name': Destroy_Task, 'duration_secs': 0.325193} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.044884] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Destroyed the VM [ 1189.045142] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Deleting Snapshot of the VM instance {{(pid=62627) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1189.045409] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-cd0f54b0-9911-4b07-9b83-bdadbe024540 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.051674] env[62627]: DEBUG oslo_vmware.api [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Waiting for the task: (returnval){ [ 1189.051674] env[62627]: value = "task-2195145" [ 1189.051674] env[62627]: _type = "Task" [ 1189.051674] env[62627]: } to complete. 
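The hardware.py lines above reduce to a small enumeration problem: find every (sockets, cores, threads) triple whose product equals the flavor's vCPU count and that fits the flavor/image limits (65536 each when unset). With one vCPU the only solution is 1x1x1, which is exactly the single topology the log reports. A compact sketch of that enumeration (simplified; Nova's real code also applies preference ordering):

    from itertools import product

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Enumerate (sockets, cores, threads) triples whose product is exactly vcpus."""
        divisors = [d for d in range(1, vcpus + 1) if vcpus % d == 0]
        found = []
        for sockets, cores in product(divisors, divisors):
            if vcpus % (sockets * cores):
                continue
            threads = vcpus // (sockets * cores)
            if sockets <= max_sockets and cores <= max_cores and threads <= max_threads:
                found.append((sockets, cores, threads))
        return found

    print(possible_topologies(1))   # [(1, 1, 1)] -- "Got 1 possible topologies"
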
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.059846] env[62627]: DEBUG oslo_vmware.api [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195145, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.134621] env[62627]: INFO nova.compute.manager [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Rebuilding instance [ 1189.169653] env[62627]: DEBUG nova.compute.manager [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1189.170622] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5aaca24e-6444-4bf2-85bb-80631253c9de {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.195151] env[62627]: DEBUG oslo_vmware.api [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52482137-e2d8-955c-cbd0-23cc78a44c98, 'name': SearchDatastore_Task, 'duration_secs': 0.009939} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.195447] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1189.195694] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1189.195947] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1189.196118] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1189.196317] 
env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1189.196589] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f157380a-61ee-490b-bb2d-203eafb8bcf3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.204615] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1189.204811] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1189.205583] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b040232b-795e-48a5-a231-4bcc8518309a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.210532] env[62627]: DEBUG oslo_vmware.api [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Waiting for the task: (returnval){ [ 1189.210532] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5274fa84-298b-4785-74c8-5cb5ae14d97f" [ 1189.210532] env[62627]: _type = "Task" [ 1189.210532] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.222286] env[62627]: DEBUG oslo_vmware.api [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5274fa84-298b-4785-74c8-5cb5ae14d97f, 'name': SearchDatastore_Task, 'duration_secs': 0.008337} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.223026] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dd9ce35a-bc38-481e-88bd-b2161a755924 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.227985] env[62627]: DEBUG oslo_vmware.api [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Waiting for the task: (returnval){ [ 1189.227985] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52fa3fe6-9627-797b-9b94-2fc0c54f7b9e" [ 1189.227985] env[62627]: _type = "Task" [ 1189.227985] env[62627]: } to complete. 
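The lock dance above (acquire the per-image lock and semaphore, search the datastore, create devstack-image-cache_base if needed, then release) is what keeps concurrent boots from downloading the same base image twice. A minimal sketch of that pattern, assuming oslo.concurrency's lockutils.lock context manager; image_in_cache and fetch_image are hypothetical stand-ins for the SearchDatastore_Task and download steps:

    from oslo_concurrency import lockutils

    def ensure_image_cached(datastore, image_id, image_in_cache, fetch_image):
        """Serialize concurrent requests for one cached image, as the log does."""
        lock_name = f"[{datastore}] devstack-image-cache_base/{image_id}"
        with lockutils.lock(lock_name):
            if not image_in_cache(datastore, image_id):   # SearchDatastore_Task above
                fetch_image(datastore, image_id)          # download/convert into the cache
        # Lock released here, matching the "Releasing lock" entries that follow.
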
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.235948] env[62627]: DEBUG oslo_vmware.api [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52fa3fe6-9627-797b-9b94-2fc0c54f7b9e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.253051] env[62627]: DEBUG oslo_concurrency.lockutils [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.358s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1189.253325] env[62627]: INFO nova.compute.manager [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Migrating [ 1189.264986] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.429s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1189.266669] env[62627]: INFO nova.compute.claims [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] [instance: 722d7bce-f0d7-4891-aa48-0ead10786a6b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1189.535420] env[62627]: DEBUG nova.compute.manager [req-294ad2ca-a5f7-441f-9d98-521e55dfad5d req-7d03bbed-edd7-4834-b493-8f7f016557bb service nova] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Received event network-vif-plugged-d885a4e0-fbdd-41cf-9e49-1e16cd392ecf {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1189.535657] env[62627]: DEBUG oslo_concurrency.lockutils [req-294ad2ca-a5f7-441f-9d98-521e55dfad5d req-7d03bbed-edd7-4834-b493-8f7f016557bb service nova] Acquiring lock "36a8d4d3-d6b6-4b42-9514-0e29ee45aea9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1189.535873] env[62627]: DEBUG oslo_concurrency.lockutils [req-294ad2ca-a5f7-441f-9d98-521e55dfad5d req-7d03bbed-edd7-4834-b493-8f7f016557bb service nova] Lock "36a8d4d3-d6b6-4b42-9514-0e29ee45aea9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1189.536060] env[62627]: DEBUG oslo_concurrency.lockutils [req-294ad2ca-a5f7-441f-9d98-521e55dfad5d req-7d03bbed-edd7-4834-b493-8f7f016557bb service nova] Lock "36a8d4d3-d6b6-4b42-9514-0e29ee45aea9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1189.536237] env[62627]: DEBUG nova.compute.manager [req-294ad2ca-a5f7-441f-9d98-521e55dfad5d req-7d03bbed-edd7-4834-b493-8f7f016557bb service nova] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] No waiting events found dispatching network-vif-plugged-d885a4e0-fbdd-41cf-9e49-1e16cd392ecf {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1189.536403] env[62627]: WARNING nova.compute.manager [req-294ad2ca-a5f7-441f-9d98-521e55dfad5d req-7d03bbed-edd7-4834-b493-8f7f016557bb service nova] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Received unexpected event network-vif-plugged-d885a4e0-fbdd-41cf-9e49-1e16cd392ecf for instance with vm_state building and task_state spawning. [ 1189.561213] env[62627]: DEBUG oslo_vmware.api [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195145, 'name': RemoveSnapshot_Task, 'duration_secs': 0.330745} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.561488] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Deleted Snapshot of the VM instance {{(pid=62627) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1189.561837] env[62627]: DEBUG nova.compute.manager [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1189.562564] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-371371e4-32cc-4a3c-8983-a6777b56eff3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.565581] env[62627]: DEBUG nova.network.neutron [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Successfully updated port: d885a4e0-fbdd-41cf-9e49-1e16cd392ecf {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1189.738284] env[62627]: DEBUG oslo_vmware.api [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52fa3fe6-9627-797b-9b94-2fc0c54f7b9e, 'name': SearchDatastore_Task, 'duration_secs': 0.008295} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.738612] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1189.738812] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] c7e3246d-ffe7-4d81-a2ec-c5878649ed6b/c7e3246d-ffe7-4d81-a2ec-c5878649ed6b.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1189.739078] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cd80802b-8fec-429c-8965-45303cb2b7d4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.746206] env[62627]: DEBUG oslo_vmware.api [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Waiting for the task: (returnval){ [ 1189.746206] env[62627]: value = "task-2195146" [ 1189.746206] env[62627]: _type = "Task" [ 1189.746206] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.753943] env[62627]: DEBUG oslo_vmware.api [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195146, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.775416] env[62627]: DEBUG oslo_concurrency.lockutils [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquiring lock "refresh_cache-d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1189.776084] env[62627]: DEBUG oslo_concurrency.lockutils [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquired lock "refresh_cache-d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1189.776084] env[62627]: DEBUG nova.network.neutron [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1190.068648] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquiring lock "refresh_cache-36a8d4d3-d6b6-4b42-9514-0e29ee45aea9" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1190.068854] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquired lock "refresh_cache-36a8d4d3-d6b6-4b42-9514-0e29ee45aea9" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1190.068954] env[62627]: DEBUG nova.network.neutron [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1190.078183] env[62627]: INFO nova.compute.manager [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Shelve offloading [ 1190.183855] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1190.184247] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a4db68f9-36e9-423d-8a0e-2647a82bee4a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.194664] env[62627]: DEBUG oslo_vmware.api [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1190.194664] env[62627]: value = 
"task-2195147" [ 1190.194664] env[62627]: _type = "Task" [ 1190.194664] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.208262] env[62627]: DEBUG oslo_vmware.api [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195147, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.257037] env[62627]: DEBUG oslo_vmware.api [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195146, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.480699] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dda6d335-860d-4af1-9b6d-624c39ead3bb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.490257] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-babb40e6-cdac-4397-8d43-9f097c89d9a3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.520309] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f1344ff-10a4-4827-a23c-7febfb935c34 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.527359] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c1c13fe-8982-4080-8f4c-de42e2a461cb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.540582] env[62627]: DEBUG nova.compute.provider_tree [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1190.547154] env[62627]: DEBUG nova.network.neutron [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Updating instance_info_cache with network_info: [{"id": "332cfd7e-9934-429a-bc74-275d48f07fdb", "address": "fa:16:3e:68:4c:72", "network": {"id": "e5d8177b-3afb-46cb-88d9-d4777ffa4a39", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1184692155-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.160", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f3a36c557145e38c528a628b1f3b67", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", 
"port_filter": true, "nsx-logical-switch-id": "e7a44713-0af1-486e-bc0d-00e03a769fa4", "external-id": "nsx-vlan-transportzone-420", "segmentation_id": 420, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap332cfd7e-99", "ovs_interfaceid": "332cfd7e-9934-429a-bc74-275d48f07fdb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1190.580862] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1190.581144] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ebad2064-cf27-4d03-bc72-530035754571 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.589186] env[62627]: DEBUG oslo_vmware.api [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Waiting for the task: (returnval){ [ 1190.589186] env[62627]: value = "task-2195148" [ 1190.589186] env[62627]: _type = "Task" [ 1190.589186] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.597232] env[62627]: DEBUG oslo_vmware.api [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195148, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.601334] env[62627]: DEBUG nova.network.neutron [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1190.704727] env[62627]: DEBUG oslo_vmware.api [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195147, 'name': PowerOffVM_Task, 'duration_secs': 0.285192} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.704990] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1190.733446] env[62627]: DEBUG nova.network.neutron [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Updating instance_info_cache with network_info: [{"id": "d885a4e0-fbdd-41cf-9e49-1e16cd392ecf", "address": "fa:16:3e:c6:8b:aa", "network": {"id": "f5de376b-cb15-4145-90b2-664ca4fa638e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1451402594-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74aed70194cf444884768a5c89fe8613", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "098df9b7-d759-47f7-b756-334848cb423b", "external-id": "nsx-vlan-transportzone-765", "segmentation_id": 765, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd885a4e0-fb", "ovs_interfaceid": "d885a4e0-fbdd-41cf-9e49-1e16cd392ecf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1190.760488] env[62627]: DEBUG oslo_vmware.api [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195146, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.512118} completed successfully. 
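The instance_info_cache entries written above carry everything later VIF plugging needs: the OVS port id, MAC, device name, and the fixed/floating addresses nested under network.subnets[].ips[]. A small sketch that walks that structure, using an abbreviated copy of the d2d4dd6b entry from the log:

    vif = {
        "id": "332cfd7e-9934-429a-bc74-275d48f07fdb",
        "address": "fa:16:3e:68:4c:72",
        "devname": "tap332cfd7e-99",
        "network": {"subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.13",
                     "floating_ips": [{"address": "10.180.180.160"}]}],
        }]},
    }

    def addresses(vif):
        """Yield (fixed, [floating, ...]) pairs from a network_info VIF dict."""
        for subnet in vif["network"]["subnets"]:
            for ip in subnet["ips"]:
                yield ip["address"], [f["address"] for f in ip.get("floating_ips", [])]

    for fixed, floating in addresses(vif):
        print(vif["devname"], fixed, floating)   # tap332cfd7e-99 192.168.128.13 ['10.180.180.160']
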
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.761663] env[62627]: INFO nova.compute.manager [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Detaching volume 4c7ed974-1052-4c7c-aece-d94f225c352b [ 1190.763518] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] c7e3246d-ffe7-4d81-a2ec-c5878649ed6b/c7e3246d-ffe7-4d81-a2ec-c5878649ed6b.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1190.763759] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1190.764281] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0883ca17-a62f-47e4-b56f-7a81d5c8bad3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.771086] env[62627]: DEBUG oslo_vmware.api [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Waiting for the task: (returnval){ [ 1190.771086] env[62627]: value = "task-2195149" [ 1190.771086] env[62627]: _type = "Task" [ 1190.771086] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.780420] env[62627]: DEBUG oslo_vmware.api [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195149, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.801624] env[62627]: INFO nova.virt.block_device [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Attempting to driver detach volume 4c7ed974-1052-4c7c-aece-d94f225c352b from mountpoint /dev/sdb [ 1190.801952] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Volume detach. 
Driver type: vmdk {{(pid=62627) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1190.802207] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447803', 'volume_id': '4c7ed974-1052-4c7c-aece-d94f225c352b', 'name': 'volume-4c7ed974-1052-4c7c-aece-d94f225c352b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1', 'attached_at': '', 'detached_at': '', 'volume_id': '4c7ed974-1052-4c7c-aece-d94f225c352b', 'serial': '4c7ed974-1052-4c7c-aece-d94f225c352b'} {{(pid=62627) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1190.803155] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-421e324d-aeb7-4de9-a335-adcaf5ca50f5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.826936] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a40e71f-e621-4be1-8668-022fda5ea9c5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.834967] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89d2175d-699c-4611-8d6b-cb000de61d0a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.855835] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60c2a1f7-fc3c-469e-875c-580e01368d2b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.870794] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] The volume has not been displaced from its original location: [datastore2] volume-4c7ed974-1052-4c7c-aece-d94f225c352b/volume-4c7ed974-1052-4c7c-aece-d94f225c352b.vmdk. No consolidation needed. 
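The detach above follows a fixed order: resolve the volume's backing VMDK from the connection info, confirm it still sits at its original path (otherwise a consolidation copy would be needed first; the log reports none was), then issue a ReconfigVM task that removes the disk device. A condensed, hypothetical sketch of that ordering; the helper callables and the DiskDevice fields are invented for illustration:

    from dataclasses import dataclass

    @dataclass
    class DiskDevice:
        key: int             # device key, e.g. 2001 in the log
        backing_path: str    # datastore path of the backing VMDK

    def detach_vmdk_volume(connection_info, find_disk_device, remove_disk_from_vm):
        """Mirror the ordering in the log; all callables are stand-ins, not Nova's API."""
        data = connection_info["data"]
        expected = f"[datastore2] {data['name']}/{data['name']}.vmdk"
        device = find_disk_device(data["volume_id"])      # the PropertyCollector reads above
        if device.backing_path != expected:
            raise RuntimeError("volume displaced; a consolidation copy would be needed first")
        remove_disk_from_vm(device.key)                   # ReconfigVM_Task: "detach disk 2001"
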
{{(pid=62627) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1190.876079] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Reconfiguring VM instance instance-00000056 to detach disk 2001 {{(pid=62627) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1190.876397] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9afaed73-3837-4ec8-8e24-c88a466efece {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.893999] env[62627]: DEBUG oslo_vmware.api [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1190.893999] env[62627]: value = "task-2195150" [ 1190.893999] env[62627]: _type = "Task" [ 1190.893999] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.901885] env[62627]: DEBUG oslo_vmware.api [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195150, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.047774] env[62627]: DEBUG nova.scheduler.client.report [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1191.051432] env[62627]: DEBUG oslo_concurrency.lockutils [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Releasing lock "refresh_cache-d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1191.099920] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] VM already powered off {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1191.100150] env[62627]: DEBUG nova.compute.manager [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1191.100868] env[62627]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c07d8c8-5d61-47fe-ab5c-c50a511ff29c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.106233] env[62627]: DEBUG oslo_concurrency.lockutils [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Acquiring lock "refresh_cache-b282093d-8013-436d-94d2-bc26161456ea" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1191.106394] env[62627]: DEBUG oslo_concurrency.lockutils [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Acquired lock "refresh_cache-b282093d-8013-436d-94d2-bc26161456ea" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1191.106561] env[62627]: DEBUG nova.network.neutron [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1191.235985] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Releasing lock "refresh_cache-36a8d4d3-d6b6-4b42-9514-0e29ee45aea9" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1191.236353] env[62627]: DEBUG nova.compute.manager [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Instance network_info: |[{"id": "d885a4e0-fbdd-41cf-9e49-1e16cd392ecf", "address": "fa:16:3e:c6:8b:aa", "network": {"id": "f5de376b-cb15-4145-90b2-664ca4fa638e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1451402594-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74aed70194cf444884768a5c89fe8613", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "098df9b7-d759-47f7-b756-334848cb423b", "external-id": "nsx-vlan-transportzone-765", "segmentation_id": 765, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd885a4e0-fb", "ovs_interfaceid": "d885a4e0-fbdd-41cf-9e49-1e16cd392ecf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1191.236812] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Instance VIF info 
[{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c6:8b:aa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '098df9b7-d759-47f7-b756-334848cb423b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd885a4e0-fbdd-41cf-9e49-1e16cd392ecf', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1191.244407] env[62627]: DEBUG oslo.service.loopingcall [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1191.244633] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1191.245215] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3323e9c4-fe29-476c-9fac-f640486de0f8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.267800] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1191.267800] env[62627]: value = "task-2195151" [ 1191.267800] env[62627]: _type = "Task" [ 1191.267800] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.276721] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195151, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.281174] env[62627]: DEBUG oslo_vmware.api [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195149, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.180383} completed successfully. 
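The ExtendVirtualDisk_Task that just completed is the step that grows the copied base image to the flavor's root disk; the target of 1048576 logged earlier appears to be the 1 GB root_gb of m1.nano expressed in KB:

    root_gb = 1                      # m1.nano flavor root_gb
    print(root_gb * 1024 * 1024)     # 1048576 -> "Extending root virtual disk to 1048576"
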
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.281431] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1191.282391] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6f98dbd-4aba-4dd9-9fd3-c947391910d0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.303408] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Reconfiguring VM instance instance-0000005f to attach disk [datastore2] c7e3246d-ffe7-4d81-a2ec-c5878649ed6b/c7e3246d-ffe7-4d81-a2ec-c5878649ed6b.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1191.303701] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-75cb34ba-3e2b-4feb-a069-03f0429afaaa {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.321821] env[62627]: DEBUG oslo_vmware.api [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Waiting for the task: (returnval){ [ 1191.321821] env[62627]: value = "task-2195152" [ 1191.321821] env[62627]: _type = "Task" [ 1191.321821] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.329598] env[62627]: DEBUG oslo_vmware.api [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195152, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.403038] env[62627]: DEBUG oslo_vmware.api [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195150, 'name': ReconfigVM_Task, 'duration_secs': 0.187803} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.403328] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Reconfigured VM instance instance-00000056 to detach disk 2001 {{(pid=62627) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1191.409482] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1feb00e2-e9e7-4775-8681-af4baedda07d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.424596] env[62627]: DEBUG oslo_vmware.api [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1191.424596] env[62627]: value = "task-2195153" [ 1191.424596] env[62627]: _type = "Task" [ 1191.424596] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.433085] env[62627]: DEBUG oslo_vmware.api [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195153, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.553836] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.289s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1191.554526] env[62627]: DEBUG nova.compute.manager [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] [instance: 722d7bce-f0d7-4891-aa48-0ead10786a6b] Start building networks asynchronously for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1191.562066] env[62627]: DEBUG oslo_concurrency.lockutils [None req-96daf845-dd40-4456-8ba3-78dae5c6a720 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.437s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1191.562066] env[62627]: DEBUG oslo_concurrency.lockutils [None req-96daf845-dd40-4456-8ba3-78dae5c6a720 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1191.564205] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.318s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1191.565917] env[62627]: INFO nova.compute.claims [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 30db4963-095b-4297-ad7c-0606451bd2b6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1191.572662] env[62627]: DEBUG nova.compute.manager [req-2c3f5286-cf06-44bd-97a0-502a4d1eef57 req-e549b0d6-4927-4a06-a919-d51eebf6944e service nova] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Received event network-changed-d885a4e0-fbdd-41cf-9e49-1e16cd392ecf {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1191.572866] env[62627]: DEBUG nova.compute.manager [req-2c3f5286-cf06-44bd-97a0-502a4d1eef57 req-e549b0d6-4927-4a06-a919-d51eebf6944e service nova] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Refreshing instance network info cache due to event network-changed-d885a4e0-fbdd-41cf-9e49-1e16cd392ecf. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1191.573091] env[62627]: DEBUG oslo_concurrency.lockutils [req-2c3f5286-cf06-44bd-97a0-502a4d1eef57 req-e549b0d6-4927-4a06-a919-d51eebf6944e service nova] Acquiring lock "refresh_cache-36a8d4d3-d6b6-4b42-9514-0e29ee45aea9" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1191.573241] env[62627]: DEBUG oslo_concurrency.lockutils [req-2c3f5286-cf06-44bd-97a0-502a4d1eef57 req-e549b0d6-4927-4a06-a919-d51eebf6944e service nova] Acquired lock "refresh_cache-36a8d4d3-d6b6-4b42-9514-0e29ee45aea9" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1191.573414] env[62627]: DEBUG nova.network.neutron [req-2c3f5286-cf06-44bd-97a0-502a4d1eef57 req-e549b0d6-4927-4a06-a919-d51eebf6944e service nova] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Refreshing network info cache for port d885a4e0-fbdd-41cf-9e49-1e16cd392ecf {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1191.593610] env[62627]: INFO nova.scheduler.client.report [None req-96daf845-dd40-4456-8ba3-78dae5c6a720 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Deleted allocations for instance 634d547b-3b5e-4185-8229-daf055ca6e69 [ 1191.778236] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195151, 'name': CreateVM_Task, 'duration_secs': 0.412379} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.778527] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1191.779229] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1191.779475] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1191.779846] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1191.780125] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-df3c9ee9-77a4-42fa-9220-a6b36fe6ef29 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.784551] env[62627]: DEBUG oslo_vmware.api [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 
tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1191.784551] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52da164a-8dc8-45bb-6df9-ab47c03231f5" [ 1191.784551] env[62627]: _type = "Task" [ 1191.784551] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.792054] env[62627]: DEBUG oslo_vmware.api [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52da164a-8dc8-45bb-6df9-ab47c03231f5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.830648] env[62627]: DEBUG oslo_vmware.api [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195152, 'name': ReconfigVM_Task, 'duration_secs': 0.383353} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.830910] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Reconfigured VM instance instance-0000005f to attach disk [datastore2] c7e3246d-ffe7-4d81-a2ec-c5878649ed6b/c7e3246d-ffe7-4d81-a2ec-c5878649ed6b.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1191.831732] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c33be072-f4e2-425b-a024-ddc4cec609cd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.838122] env[62627]: DEBUG oslo_vmware.api [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Waiting for the task: (returnval){ [ 1191.838122] env[62627]: value = "task-2195154" [ 1191.838122] env[62627]: _type = "Task" [ 1191.838122] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.847356] env[62627]: DEBUG oslo_vmware.api [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195154, 'name': Rename_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.936433] env[62627]: DEBUG oslo_vmware.api [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195153, 'name': ReconfigVM_Task, 'duration_secs': 0.231725} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.936745] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447803', 'volume_id': '4c7ed974-1052-4c7c-aece-d94f225c352b', 'name': 'volume-4c7ed974-1052-4c7c-aece-d94f225c352b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1', 'attached_at': '', 'detached_at': '', 'volume_id': '4c7ed974-1052-4c7c-aece-d94f225c352b', 'serial': '4c7ed974-1052-4c7c-aece-d94f225c352b'} {{(pid=62627) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1192.065020] env[62627]: DEBUG nova.compute.utils [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1192.065020] env[62627]: DEBUG nova.compute.manager [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] [instance: 722d7bce-f0d7-4891-aa48-0ead10786a6b] Allocating IP information in the background. {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1192.065675] env[62627]: DEBUG nova.network.neutron [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] [instance: 722d7bce-f0d7-4891-aa48-0ead10786a6b] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1192.107898] env[62627]: DEBUG oslo_concurrency.lockutils [None req-96daf845-dd40-4456-8ba3-78dae5c6a720 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "634d547b-3b5e-4185-8229-daf055ca6e69" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.321s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1192.136945] env[62627]: DEBUG nova.policy [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5b8b518671484a0ca92516d1bb4715c3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'beed9c3722f6470e8a1ed44281fe57e5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 1192.153865] env[62627]: DEBUG nova.network.neutron [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] 
Updating instance_info_cache with network_info: [{"id": "823150e9-f3ee-44ab-a36b-c8abdca8b70e", "address": "fa:16:3e:85:08:41", "network": {"id": "280614ef-fbde-41d9-927f-7471cc50aa14", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-970093695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97d5055e1a674886a161a96b0e095451", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8233b50c-be21-441a-a357-a29138a95b8b", "external-id": "nsx-vlan-transportzone-496", "segmentation_id": 496, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap823150e9-f3", "ovs_interfaceid": "823150e9-f3ee-44ab-a36b-c8abdca8b70e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1192.298211] env[62627]: DEBUG oslo_vmware.api [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52da164a-8dc8-45bb-6df9-ab47c03231f5, 'name': SearchDatastore_Task, 'duration_secs': 0.009362} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.298565] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1192.298827] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1192.299189] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1192.299291] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1192.299504] env[62627]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1192.299792] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c16294ab-0048-412c-a13f-4b2cf0fe54a2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.310198] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1192.310198] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1192.310198] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fae69569-e01d-4755-9fdd-1307cd3c1c6c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.315587] env[62627]: DEBUG oslo_vmware.api [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1192.315587] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52b3a6fb-432f-972b-386f-126c1bb74a26" [ 1192.315587] env[62627]: _type = "Task" [ 1192.315587] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.324132] env[62627]: DEBUG oslo_vmware.api [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52b3a6fb-432f-972b-386f-126c1bb74a26, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.330009] env[62627]: DEBUG nova.network.neutron [req-2c3f5286-cf06-44bd-97a0-502a4d1eef57 req-e549b0d6-4927-4a06-a919-d51eebf6944e service nova] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Updated VIF entry in instance network info cache for port d885a4e0-fbdd-41cf-9e49-1e16cd392ecf. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1192.330345] env[62627]: DEBUG nova.network.neutron [req-2c3f5286-cf06-44bd-97a0-502a4d1eef57 req-e549b0d6-4927-4a06-a919-d51eebf6944e service nova] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Updating instance_info_cache with network_info: [{"id": "d885a4e0-fbdd-41cf-9e49-1e16cd392ecf", "address": "fa:16:3e:c6:8b:aa", "network": {"id": "f5de376b-cb15-4145-90b2-664ca4fa638e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1451402594-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74aed70194cf444884768a5c89fe8613", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "098df9b7-d759-47f7-b756-334848cb423b", "external-id": "nsx-vlan-transportzone-765", "segmentation_id": 765, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd885a4e0-fb", "ovs_interfaceid": "d885a4e0-fbdd-41cf-9e49-1e16cd392ecf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1192.347685] env[62627]: DEBUG oslo_vmware.api [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195154, 'name': Rename_Task, 'duration_secs': 0.132265} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.347991] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1192.348314] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-466beca8-f353-4ac6-abd6-39960f4e1d20 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.354544] env[62627]: DEBUG oslo_vmware.api [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Waiting for the task: (returnval){ [ 1192.354544] env[62627]: value = "task-2195155" [ 1192.354544] env[62627]: _type = "Task" [ 1192.354544] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.364354] env[62627]: DEBUG oslo_vmware.api [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195155, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.457757] env[62627]: DEBUG nova.network.neutron [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] [instance: 722d7bce-f0d7-4891-aa48-0ead10786a6b] Successfully created port: f1d904fd-a7a5-45af-a796-47dcf5bf2e4e {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1192.571871] env[62627]: DEBUG nova.compute.manager [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] [instance: 722d7bce-f0d7-4891-aa48-0ead10786a6b] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1192.578064] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9cb635b-5d14-414d-839d-9919f207565f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.599877] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Updating instance 'd2d4dd6b-d5c5-402b-b165-4c9ae1e36b93' progress to 0 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1192.657860] env[62627]: DEBUG oslo_concurrency.lockutils [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Releasing lock "refresh_cache-b282093d-8013-436d-94d2-bc26161456ea" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1192.812151] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8058a1a1-c8d6-411a-b751-b908c9b7b937 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.824725] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be59bb7e-58e1-4f42-9286-56740f437c72 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.836958] env[62627]: DEBUG oslo_concurrency.lockutils [req-2c3f5286-cf06-44bd-97a0-502a4d1eef57 req-e549b0d6-4927-4a06-a919-d51eebf6944e service nova] Releasing lock "refresh_cache-36a8d4d3-d6b6-4b42-9514-0e29ee45aea9" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1192.839028] env[62627]: DEBUG oslo_vmware.api [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52b3a6fb-432f-972b-386f-126c1bb74a26, 'name': SearchDatastore_Task, 'duration_secs': 0.008362} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.866793] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dd40ac44-02d0-441d-85a8-3a35bbc1d41b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.872464] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a14c69be-7db3-4b47-84d7-cb32ddc36a00 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.878626] env[62627]: DEBUG oslo_vmware.api [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1192.878626] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52fff7f2-9b05-d58a-e60c-4d87db3b82ba" [ 1192.878626] env[62627]: _type = "Task" [ 1192.878626] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.883481] env[62627]: DEBUG oslo_vmware.api [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195155, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.887411] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a72d1c0-d373-414c-a8a2-aca4e9fd9112 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.896199] env[62627]: DEBUG oslo_vmware.api [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52fff7f2-9b05-d58a-e60c-4d87db3b82ba, 'name': SearchDatastore_Task, 'duration_secs': 0.008964} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.903904] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1192.904244] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9/36a8d4d3-d6b6-4b42-9514-0e29ee45aea9.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1192.904749] env[62627]: DEBUG nova.compute.provider_tree [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1192.906113] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f90dfe56-336b-44e6-ba82-c0336cb01c6e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.914921] env[62627]: DEBUG oslo_vmware.api [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1192.914921] env[62627]: value = "task-2195156" [ 1192.914921] env[62627]: _type = "Task" [ 1192.914921] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.922992] env[62627]: DEBUG oslo_vmware.api [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195156, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.952792] env[62627]: DEBUG nova.compute.manager [req-ab81245c-fefa-430d-ab0f-7b9def465308 req-376d8f40-f212-4d6d-a2e3-5e837bf188cd service nova] [instance: b282093d-8013-436d-94d2-bc26161456ea] Received event network-vif-unplugged-823150e9-f3ee-44ab-a36b-c8abdca8b70e {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1192.952908] env[62627]: DEBUG oslo_concurrency.lockutils [req-ab81245c-fefa-430d-ab0f-7b9def465308 req-376d8f40-f212-4d6d-a2e3-5e837bf188cd service nova] Acquiring lock "b282093d-8013-436d-94d2-bc26161456ea-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1192.953291] env[62627]: DEBUG oslo_concurrency.lockutils [req-ab81245c-fefa-430d-ab0f-7b9def465308 req-376d8f40-f212-4d6d-a2e3-5e837bf188cd service nova] Lock "b282093d-8013-436d-94d2-bc26161456ea-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1192.953486] env[62627]: DEBUG oslo_concurrency.lockutils [req-ab81245c-fefa-430d-ab0f-7b9def465308 req-376d8f40-f212-4d6d-a2e3-5e837bf188cd service nova] Lock "b282093d-8013-436d-94d2-bc26161456ea-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1192.953683] env[62627]: DEBUG nova.compute.manager [req-ab81245c-fefa-430d-ab0f-7b9def465308 req-376d8f40-f212-4d6d-a2e3-5e837bf188cd service nova] [instance: b282093d-8013-436d-94d2-bc26161456ea] No waiting events found dispatching network-vif-unplugged-823150e9-f3ee-44ab-a36b-c8abdca8b70e {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1192.954081] env[62627]: WARNING nova.compute.manager [req-ab81245c-fefa-430d-ab0f-7b9def465308 req-376d8f40-f212-4d6d-a2e3-5e837bf188cd service nova] [instance: b282093d-8013-436d-94d2-bc26161456ea] Received unexpected event network-vif-unplugged-823150e9-f3ee-44ab-a36b-c8abdca8b70e for instance with vm_state shelved and task_state shelving_offloading. 
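The entries above and below repeatedly show the oslo_vmware task-polling pattern: an API call returns a task reference, the caller logs "Waiting for the task ... to complete", and _poll_task reports "progress is N%" until the task finishes. The following is a minimal Python sketch of that polling loop, written here as an illustration only; get_task_info is a hypothetical callable standing in for the vSphere TaskInfo lookup, and this is not the actual oslo.vmware implementation.

import time

class TaskFailed(Exception):
    """Hypothetical helper: raised when the polled task ends in an error state."""

def wait_for_task(get_task_info, task_ref, interval=0.5):
    # Assumption: get_task_info(task_ref) returns an object with .state
    # ('queued', 'running', 'success', 'error'), .progress and .error,
    # mirroring the vSphere TaskInfo structure that oslo_vmware polls.
    while True:
        info = get_task_info(task_ref)
        if info.state == 'success':
            return info
        if info.state == 'error':
            raise TaskFailed('Task %s failed: %s' % (task_ref, info.error))
        # queued/running: report progress and poll again, as in the
        # "Task: {'id': task-..., 'name': ...} progress is N%" entries above.
        print('Task %s progress is %s%%' % (task_ref, info.progress or 0))
        time.sleep(interval)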
[ 1192.986519] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1192.987741] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83d37e5b-fbf3-492d-b99d-a2d77ccb8cf9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.992262] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1192.992496] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-07941a86-20c1-4512-8117-200092751861 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.996284] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1192.996891] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-330434eb-c7c6-4ce3-9b92-39763fee611d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.000939] env[62627]: DEBUG oslo_vmware.api [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1193.000939] env[62627]: value = "task-2195157" [ 1193.000939] env[62627]: _type = "Task" [ 1193.000939] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.013586] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] VM already powered off {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1193.013819] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Volume detach. 
Driver type: vmdk {{(pid=62627) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1193.014046] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447803', 'volume_id': '4c7ed974-1052-4c7c-aece-d94f225c352b', 'name': 'volume-4c7ed974-1052-4c7c-aece-d94f225c352b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1', 'attached_at': '', 'detached_at': '', 'volume_id': '4c7ed974-1052-4c7c-aece-d94f225c352b', 'serial': '4c7ed974-1052-4c7c-aece-d94f225c352b'} {{(pid=62627) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1193.014842] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4d21167-5ec8-47f5-92c9-38ac7ab2f354 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.036184] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e3703f8-68e1-4c26-bf8f-a97692529fee {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.043426] env[62627]: WARNING nova.virt.vmwareapi.driver [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] The volume None does not exist!: nova.exception.DiskNotFound: Unable to find volume [ 1193.045020] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1193.045020] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7c10650-ebf7-4a92-80af-ff13641b37a4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.051301] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1193.051521] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7be1da4c-c26c-42a7-ac3d-0cf10467f873 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.066582] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1193.066811] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None 
req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Deleting contents of the VM from datastore datastore1 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1193.066992] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Deleting the datastore file [datastore1] b282093d-8013-436d-94d2-bc26161456ea {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1193.067254] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0ad03078-3c3c-416e-aa23-cca5efa4af2c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.074205] env[62627]: DEBUG oslo_vmware.api [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Waiting for the task: (returnval){ [ 1193.074205] env[62627]: value = "task-2195160" [ 1193.074205] env[62627]: _type = "Task" [ 1193.074205] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.088133] env[62627]: DEBUG oslo_vmware.api [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195160, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.105739] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1193.106075] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3273f45d-18d2-4919-9912-cf394589b11a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.113113] env[62627]: DEBUG oslo_vmware.api [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1193.113113] env[62627]: value = "task-2195161" [ 1193.113113] env[62627]: _type = "Task" [ 1193.113113] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.123629] env[62627]: DEBUG oslo_vmware.api [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195161, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.142048] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1193.142296] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1193.142488] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Deleting the datastore file [datastore2] f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1193.142769] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3b130816-677b-49a6-809d-23f77d16160d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.149316] env[62627]: DEBUG oslo_vmware.api [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1193.149316] env[62627]: value = "task-2195162" [ 1193.149316] env[62627]: _type = "Task" [ 1193.149316] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.159522] env[62627]: DEBUG oslo_vmware.api [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195162, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.381072] env[62627]: DEBUG oslo_vmware.api [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195155, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.411993] env[62627]: DEBUG nova.scheduler.client.report [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1193.427898] env[62627]: DEBUG oslo_vmware.api [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195156, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.470881} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1193.428182] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9/36a8d4d3-d6b6-4b42-9514-0e29ee45aea9.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1193.428392] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1193.428664] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-23d6ae8e-dd96-4a72-a915-b81110a61951 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.436040] env[62627]: DEBUG oslo_vmware.api [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1193.436040] env[62627]: value = "task-2195163" [ 1193.436040] env[62627]: _type = "Task" [ 1193.436040] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.443551] env[62627]: DEBUG oslo_vmware.api [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195163, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.584877] env[62627]: DEBUG nova.compute.manager [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] [instance: 722d7bce-f0d7-4891-aa48-0ead10786a6b] Start spawning the instance on the hypervisor. {{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1193.587045] env[62627]: DEBUG oslo_vmware.api [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195160, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.321232} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1193.587584] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1193.587782] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Deleted contents of the VM from datastore datastore1 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1193.587957] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1193.608633] env[62627]: INFO nova.scheduler.client.report [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Deleted allocations for instance b282093d-8013-436d-94d2-bc26161456ea [ 1193.613654] env[62627]: DEBUG nova.virt.hardware [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1193.613876] env[62627]: DEBUG nova.virt.hardware [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] 
Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1193.614049] env[62627]: DEBUG nova.virt.hardware [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1193.614240] env[62627]: DEBUG nova.virt.hardware [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1193.614386] env[62627]: DEBUG nova.virt.hardware [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1193.614530] env[62627]: DEBUG nova.virt.hardware [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1193.614738] env[62627]: DEBUG nova.virt.hardware [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1193.614895] env[62627]: DEBUG nova.virt.hardware [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1193.615070] env[62627]: DEBUG nova.virt.hardware [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1193.615237] env[62627]: DEBUG nova.virt.hardware [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1193.615495] env[62627]: DEBUG nova.virt.hardware [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1193.616404] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb3d5837-4114-4ab6-b820-e7e8d1f9310a {{(pid=62627) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.631066] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a5f0608-f0a0-43a0-a8e1-d60976d9390b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.636935] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] VM already powered off {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1193.637126] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Updating instance 'd2d4dd6b-d5c5-402b-b165-4c9ae1e36b93' progress to 17 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1193.660060] env[62627]: DEBUG oslo_vmware.api [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195162, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.334096} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1193.660247] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1193.660407] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1193.660577] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1193.845463] env[62627]: DEBUG nova.compute.manager [req-f719c70e-c023-4e11-8fe5-e79915851bb0 req-bfd63c8b-2ef5-4377-9cc4-fd5c14336b1f service nova] [instance: 722d7bce-f0d7-4891-aa48-0ead10786a6b] Received event network-vif-plugged-f1d904fd-a7a5-45af-a796-47dcf5bf2e4e {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1193.845644] env[62627]: DEBUG oslo_concurrency.lockutils [req-f719c70e-c023-4e11-8fe5-e79915851bb0 req-bfd63c8b-2ef5-4377-9cc4-fd5c14336b1f service nova] Acquiring lock "722d7bce-f0d7-4891-aa48-0ead10786a6b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1193.845690] env[62627]: DEBUG oslo_concurrency.lockutils [req-f719c70e-c023-4e11-8fe5-e79915851bb0 req-bfd63c8b-2ef5-4377-9cc4-fd5c14336b1f service nova] Lock 
"722d7bce-f0d7-4891-aa48-0ead10786a6b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1193.845875] env[62627]: DEBUG oslo_concurrency.lockutils [req-f719c70e-c023-4e11-8fe5-e79915851bb0 req-bfd63c8b-2ef5-4377-9cc4-fd5c14336b1f service nova] Lock "722d7bce-f0d7-4891-aa48-0ead10786a6b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1193.847071] env[62627]: DEBUG nova.compute.manager [req-f719c70e-c023-4e11-8fe5-e79915851bb0 req-bfd63c8b-2ef5-4377-9cc4-fd5c14336b1f service nova] [instance: 722d7bce-f0d7-4891-aa48-0ead10786a6b] No waiting events found dispatching network-vif-plugged-f1d904fd-a7a5-45af-a796-47dcf5bf2e4e {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1193.847293] env[62627]: WARNING nova.compute.manager [req-f719c70e-c023-4e11-8fe5-e79915851bb0 req-bfd63c8b-2ef5-4377-9cc4-fd5c14336b1f service nova] [instance: 722d7bce-f0d7-4891-aa48-0ead10786a6b] Received unexpected event network-vif-plugged-f1d904fd-a7a5-45af-a796-47dcf5bf2e4e for instance with vm_state building and task_state spawning. [ 1193.878034] env[62627]: DEBUG oslo_vmware.api [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195155, 'name': PowerOnVM_Task, 'duration_secs': 1.291832} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1193.878245] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1193.878608] env[62627]: INFO nova.compute.manager [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Took 10.04 seconds to spawn the instance on the hypervisor. 
[ 1193.878847] env[62627]: DEBUG nova.compute.manager [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1193.879675] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fec1a7c8-d793-482a-84db-c80353328ecc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.926203] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.361s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1193.929356] env[62627]: DEBUG nova.compute.manager [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 30db4963-095b-4297-ad7c-0606451bd2b6] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1193.929875] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.204s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1193.932800] env[62627]: INFO nova.compute.claims [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1193.980848] env[62627]: DEBUG oslo_vmware.api [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195163, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070237} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1193.980848] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1193.980848] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd7b9307-74bf-4b32-974e-9ae9c5c2bc88 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.980848] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Reconfiguring VM instance instance-00000060 to attach disk [datastore2] 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9/36a8d4d3-d6b6-4b42-9514-0e29ee45aea9.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1193.980848] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-79f96567-173c-45c0-b8b1-c80d41eabdb6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.990148] env[62627]: DEBUG nova.network.neutron [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] [instance: 722d7bce-f0d7-4891-aa48-0ead10786a6b] Successfully updated port: f1d904fd-a7a5-45af-a796-47dcf5bf2e4e {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1194.003284] env[62627]: DEBUG oslo_vmware.api [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1194.003284] env[62627]: value = "task-2195164" [ 1194.003284] env[62627]: _type = "Task" [ 1194.003284] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.026579] env[62627]: DEBUG oslo_vmware.api [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195164, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.113207] env[62627]: DEBUG oslo_concurrency.lockutils [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1194.144020] env[62627]: DEBUG nova.virt.hardware [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1194.144338] env[62627]: DEBUG nova.virt.hardware [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1194.144539] env[62627]: DEBUG nova.virt.hardware [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1194.144758] env[62627]: DEBUG nova.virt.hardware [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1194.144937] env[62627]: DEBUG nova.virt.hardware [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1194.145157] env[62627]: DEBUG nova.virt.hardware [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1194.145430] env[62627]: DEBUG nova.virt.hardware [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1194.145643] env[62627]: DEBUG nova.virt.hardware [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 
tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1194.145854] env[62627]: DEBUG nova.virt.hardware [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1194.146062] env[62627]: DEBUG nova.virt.hardware [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1194.146276] env[62627]: DEBUG nova.virt.hardware [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1194.151682] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-28174950-bdff-47e1-8d6b-68d6bd093992 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.164416] env[62627]: INFO nova.virt.block_device [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Booting with volume 4c7ed974-1052-4c7c-aece-d94f225c352b at /dev/sdb [ 1194.167416] env[62627]: DEBUG oslo_vmware.api [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1194.167416] env[62627]: value = "task-2195165" [ 1194.167416] env[62627]: _type = "Task" [ 1194.167416] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.178160] env[62627]: DEBUG oslo_vmware.api [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195165, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.200808] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d4bfadf9-d1c7-4fce-889d-4f1a942b0342 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.209274] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e63b99e6-03d8-4347-9b0e-b842d927767d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.238760] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a619bf06-6502-4607-a80f-82752a980331 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.246974] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb20cbea-98cb-4033-b894-5c1318c1cee5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.275636] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d778a57-3666-4352-a3d0-b610ce5288df {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.281860] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5029b9a-7937-46d5-9ef7-741175be8a1c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.294435] env[62627]: DEBUG nova.virt.block_device [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Updating existing volume attachment record: 98b7171b-4d56-4cd7-9f38-3fc0c53d7766 {{(pid=62627) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1194.396131] env[62627]: INFO nova.compute.manager [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Took 17.10 seconds to build instance. [ 1194.440692] env[62627]: DEBUG nova.compute.utils [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1194.444608] env[62627]: DEBUG nova.compute.manager [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 30db4963-095b-4297-ad7c-0606451bd2b6] Allocating IP information in the background. 
{{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1194.444777] env[62627]: DEBUG nova.network.neutron [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 30db4963-095b-4297-ad7c-0606451bd2b6] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1194.492881] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Acquiring lock "refresh_cache-722d7bce-f0d7-4891-aa48-0ead10786a6b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1194.493053] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Acquired lock "refresh_cache-722d7bce-f0d7-4891-aa48-0ead10786a6b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1194.493217] env[62627]: DEBUG nova.network.neutron [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] [instance: 722d7bce-f0d7-4891-aa48-0ead10786a6b] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1194.513790] env[62627]: DEBUG oslo_vmware.api [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195164, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.544667] env[62627]: DEBUG nova.policy [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b8960d0a8ce744e8b0536074751e9d0d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f41b0c171180469e9092b8a8da697a78', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 1194.677198] env[62627]: DEBUG oslo_vmware.api [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195165, 'name': ReconfigVM_Task, 'duration_secs': 0.462425} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.677544] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Updating instance 'd2d4dd6b-d5c5-402b-b165-4c9ae1e36b93' progress to 33 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1194.898076] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5a8b7cce-93b5-4ee9-a0ba-5d7d1c2052d0 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lock "c7e3246d-ffe7-4d81-a2ec-c5878649ed6b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.617s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1194.945336] env[62627]: DEBUG nova.compute.manager [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 30db4963-095b-4297-ad7c-0606451bd2b6] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1194.978936] env[62627]: DEBUG nova.compute.manager [req-a33636da-8de0-4003-9c27-d2bc09ed603f req-88623c28-bda6-4012-8ef7-d4ef4d1eb501 service nova] [instance: b282093d-8013-436d-94d2-bc26161456ea] Received event network-changed-823150e9-f3ee-44ab-a36b-c8abdca8b70e {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1194.979075] env[62627]: DEBUG nova.compute.manager [req-a33636da-8de0-4003-9c27-d2bc09ed603f req-88623c28-bda6-4012-8ef7-d4ef4d1eb501 service nova] [instance: b282093d-8013-436d-94d2-bc26161456ea] Refreshing instance network info cache due to event network-changed-823150e9-f3ee-44ab-a36b-c8abdca8b70e. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1194.979295] env[62627]: DEBUG oslo_concurrency.lockutils [req-a33636da-8de0-4003-9c27-d2bc09ed603f req-88623c28-bda6-4012-8ef7-d4ef4d1eb501 service nova] Acquiring lock "refresh_cache-b282093d-8013-436d-94d2-bc26161456ea" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1194.979425] env[62627]: DEBUG oslo_concurrency.lockutils [req-a33636da-8de0-4003-9c27-d2bc09ed603f req-88623c28-bda6-4012-8ef7-d4ef4d1eb501 service nova] Acquired lock "refresh_cache-b282093d-8013-436d-94d2-bc26161456ea" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1194.979586] env[62627]: DEBUG nova.network.neutron [req-a33636da-8de0-4003-9c27-d2bc09ed603f req-88623c28-bda6-4012-8ef7-d4ef4d1eb501 service nova] [instance: b282093d-8013-436d-94d2-bc26161456ea] Refreshing network info cache for port 823150e9-f3ee-44ab-a36b-c8abdca8b70e {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1195.015680] env[62627]: DEBUG oslo_vmware.api [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195164, 'name': ReconfigVM_Task, 'duration_secs': 0.847159} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.017776] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Reconfigured VM instance instance-00000060 to attach disk [datastore2] 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9/36a8d4d3-d6b6-4b42-9514-0e29ee45aea9.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1195.019244] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-19efc1a1-d0e2-4917-9d68-ed166108c49b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.025585] env[62627]: DEBUG oslo_vmware.api [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1195.025585] env[62627]: value = "task-2195166" [ 1195.025585] env[62627]: _type = "Task" [ 1195.025585] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.034877] env[62627]: DEBUG oslo_vmware.api [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195166, 'name': Rename_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.035913] env[62627]: DEBUG nova.network.neutron [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] [instance: 722d7bce-f0d7-4891-aa48-0ead10786a6b] Instance cache missing network info. 
{{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1195.126019] env[62627]: DEBUG nova.network.neutron [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 30db4963-095b-4297-ad7c-0606451bd2b6] Successfully created port: c9e055e0-0774-4741-a16c-139233d6803d {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1195.185747] env[62627]: DEBUG nova.virt.hardware [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1195.185747] env[62627]: DEBUG nova.virt.hardware [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1195.185747] env[62627]: DEBUG nova.virt.hardware [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1195.185747] env[62627]: DEBUG nova.virt.hardware [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1195.185936] env[62627]: DEBUG nova.virt.hardware [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1195.185977] env[62627]: DEBUG nova.virt.hardware [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1195.186201] env[62627]: DEBUG nova.virt.hardware [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1195.186337] env[62627]: DEBUG nova.virt.hardware [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 
tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1195.186495] env[62627]: DEBUG nova.virt.hardware [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1195.186657] env[62627]: DEBUG nova.virt.hardware [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1195.187172] env[62627]: DEBUG nova.virt.hardware [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1195.193109] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Reconfiguring VM instance instance-0000003b to detach disk 2000 {{(pid=62627) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1195.195601] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-91acd5cf-f2fd-41da-ace2-83fb7a226dc9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.219751] env[62627]: DEBUG oslo_vmware.api [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1195.219751] env[62627]: value = "task-2195167" [ 1195.219751] env[62627]: _type = "Task" [ 1195.219751] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.229110] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1daa0fc9-6c89-45b5-94a1-fc357cfd2357 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.233219] env[62627]: DEBUG oslo_vmware.api [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195167, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.238557] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-767a39ab-828c-48ac-bab9-bd22eded754f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.272155] env[62627]: DEBUG nova.network.neutron [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] [instance: 722d7bce-f0d7-4891-aa48-0ead10786a6b] Updating instance_info_cache with network_info: [{"id": "f1d904fd-a7a5-45af-a796-47dcf5bf2e4e", "address": "fa:16:3e:ca:18:c9", "network": {"id": "7962cd8d-1d16-483a-9a80-0f3c9303a227", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1727604738-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "beed9c3722f6470e8a1ed44281fe57e5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4105fae7-3de1-4a6c-a44b-39097c174699", "external-id": "nsx-vlan-transportzone-773", "segmentation_id": 773, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1d904fd-a7", "ovs_interfaceid": "f1d904fd-a7a5-45af-a796-47dcf5bf2e4e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1195.273866] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43c1b7a6-268d-4386-88c7-53ce96266519 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.282365] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0237ce44-09d6-44a1-90eb-53c7d639d135 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.296902] env[62627]: DEBUG nova.compute.provider_tree [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1195.537642] env[62627]: DEBUG oslo_vmware.api [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195166, 'name': Rename_Task, 'duration_secs': 0.269106} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.538031] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1195.538340] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9f45749d-3c41-4a78-8bbf-98e11897f0e8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.546577] env[62627]: DEBUG oslo_vmware.api [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1195.546577] env[62627]: value = "task-2195168" [ 1195.546577] env[62627]: _type = "Task" [ 1195.546577] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.556859] env[62627]: DEBUG oslo_vmware.api [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195168, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.739108] env[62627]: DEBUG oslo_vmware.api [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195167, 'name': ReconfigVM_Task, 'duration_secs': 0.327742} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.739906] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Reconfigured VM instance instance-0000003b to detach disk 2000 {{(pid=62627) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1195.743610] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0316a631-b540-46dd-9ccb-7d4b9c4dfbc9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.768581] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Reconfiguring VM instance instance-0000003b to attach disk [datastore2] d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93/d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1195.770147] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-32344bdb-a653-4dcc-9bd0-83885165f63c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.786255] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Releasing lock "refresh_cache-722d7bce-f0d7-4891-aa48-0ead10786a6b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1195.786527] env[62627]: DEBUG nova.compute.manager [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] [instance: 722d7bce-f0d7-4891-aa48-0ead10786a6b] Instance network_info: |[{"id": "f1d904fd-a7a5-45af-a796-47dcf5bf2e4e", "address": "fa:16:3e:ca:18:c9", "network": {"id": "7962cd8d-1d16-483a-9a80-0f3c9303a227", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1727604738-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "beed9c3722f6470e8a1ed44281fe57e5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4105fae7-3de1-4a6c-a44b-39097c174699", "external-id": "nsx-vlan-transportzone-773", "segmentation_id": 773, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1d904fd-a7", "ovs_interfaceid": "f1d904fd-a7a5-45af-a796-47dcf5bf2e4e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1195.787088] 
env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] [instance: 722d7bce-f0d7-4891-aa48-0ead10786a6b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ca:18:c9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4105fae7-3de1-4a6c-a44b-39097c174699', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f1d904fd-a7a5-45af-a796-47dcf5bf2e4e', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1195.795490] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Creating folder: Project (beed9c3722f6470e8a1ed44281fe57e5). Parent ref: group-v447541. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1195.796842] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7c5565b8-fefc-4c99-8571-570f5cab6367 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.799272] env[62627]: DEBUG nova.scheduler.client.report [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1195.806035] env[62627]: DEBUG oslo_vmware.api [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1195.806035] env[62627]: value = "task-2195169" [ 1195.806035] env[62627]: _type = "Task" [ 1195.806035] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.806035] env[62627]: DEBUG nova.network.neutron [req-a33636da-8de0-4003-9c27-d2bc09ed603f req-88623c28-bda6-4012-8ef7-d4ef4d1eb501 service nova] [instance: b282093d-8013-436d-94d2-bc26161456ea] Updated VIF entry in instance network info cache for port 823150e9-f3ee-44ab-a36b-c8abdca8b70e. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1195.806035] env[62627]: DEBUG nova.network.neutron [req-a33636da-8de0-4003-9c27-d2bc09ed603f req-88623c28-bda6-4012-8ef7-d4ef4d1eb501 service nova] [instance: b282093d-8013-436d-94d2-bc26161456ea] Updating instance_info_cache with network_info: [{"id": "823150e9-f3ee-44ab-a36b-c8abdca8b70e", "address": "fa:16:3e:85:08:41", "network": {"id": "280614ef-fbde-41d9-927f-7471cc50aa14", "bridge": null, "label": "tempest-ServersNegativeTestJSON-970093695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97d5055e1a674886a161a96b0e095451", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap823150e9-f3", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1195.810975] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Created folder: Project (beed9c3722f6470e8a1ed44281fe57e5) in parent group-v447541. [ 1195.811180] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Creating folder: Instances. Parent ref: group-v447806. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1195.811433] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c6b04b4b-1629-4721-b985-d7820b53e8da {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.816752] env[62627]: DEBUG oslo_vmware.api [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195169, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.825319] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Created folder: Instances in parent group-v447806. [ 1195.825573] env[62627]: DEBUG oslo.service.loopingcall [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1195.825772] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 722d7bce-f0d7-4891-aa48-0ead10786a6b] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1195.826206] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-97680961-45f3-4318-82c6-0b5b3052c9ee {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.845301] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1195.845301] env[62627]: value = "task-2195172" [ 1195.845301] env[62627]: _type = "Task" [ 1195.845301] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.853102] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195172, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.872345] env[62627]: DEBUG nova.compute.manager [req-a7d1426e-69db-4caf-90b4-310727005522 req-f598b7ad-0cd2-4f3c-b891-d0e7148952bf service nova] [instance: 722d7bce-f0d7-4891-aa48-0ead10786a6b] Received event network-changed-f1d904fd-a7a5-45af-a796-47dcf5bf2e4e {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1195.872509] env[62627]: DEBUG nova.compute.manager [req-a7d1426e-69db-4caf-90b4-310727005522 req-f598b7ad-0cd2-4f3c-b891-d0e7148952bf service nova] [instance: 722d7bce-f0d7-4891-aa48-0ead10786a6b] Refreshing instance network info cache due to event network-changed-f1d904fd-a7a5-45af-a796-47dcf5bf2e4e. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1195.872726] env[62627]: DEBUG oslo_concurrency.lockutils [req-a7d1426e-69db-4caf-90b4-310727005522 req-f598b7ad-0cd2-4f3c-b891-d0e7148952bf service nova] Acquiring lock "refresh_cache-722d7bce-f0d7-4891-aa48-0ead10786a6b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1195.873427] env[62627]: DEBUG oslo_concurrency.lockutils [req-a7d1426e-69db-4caf-90b4-310727005522 req-f598b7ad-0cd2-4f3c-b891-d0e7148952bf service nova] Acquired lock "refresh_cache-722d7bce-f0d7-4891-aa48-0ead10786a6b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1195.873427] env[62627]: DEBUG nova.network.neutron [req-a7d1426e-69db-4caf-90b4-310727005522 req-f598b7ad-0cd2-4f3c-b891-d0e7148952bf service nova] [instance: 722d7bce-f0d7-4891-aa48-0ead10786a6b] Refreshing network info cache for port f1d904fd-a7a5-45af-a796-47dcf5bf2e4e {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1195.958356] env[62627]: DEBUG nova.compute.manager [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 30db4963-095b-4297-ad7c-0606451bd2b6] Start spawning the instance on the hypervisor. 
{{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1195.986438] env[62627]: DEBUG nova.virt.hardware [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1195.986685] env[62627]: DEBUG nova.virt.hardware [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1195.986846] env[62627]: DEBUG nova.virt.hardware [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1195.987048] env[62627]: DEBUG nova.virt.hardware [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1195.987211] env[62627]: DEBUG nova.virt.hardware [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1195.987364] env[62627]: DEBUG nova.virt.hardware [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1195.987603] env[62627]: DEBUG nova.virt.hardware [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1195.987770] env[62627]: DEBUG nova.virt.hardware [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1195.987939] 
env[62627]: DEBUG nova.virt.hardware [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1195.988112] env[62627]: DEBUG nova.virt.hardware [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1195.988289] env[62627]: DEBUG nova.virt.hardware [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1195.989192] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16aa9b0e-e4f5-406d-b6d3-8e2eec813195 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.997488] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60d3b88a-29c9-4e21-9ed9-5936d5429bed {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.054733] env[62627]: DEBUG oslo_vmware.api [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195168, 'name': PowerOnVM_Task} progress is 94%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.206105] env[62627]: DEBUG oslo_concurrency.lockutils [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquiring lock "1109aae0-62d2-4c55-8c68-f224f3de8b55" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1196.206336] env[62627]: DEBUG oslo_concurrency.lockutils [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lock "1109aae0-62d2-4c55-8c68-f224f3de8b55" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1196.287526] env[62627]: DEBUG oslo_concurrency.lockutils [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Acquiring lock "b282093d-8013-436d-94d2-bc26161456ea" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1196.306218] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.375s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1196.306218] env[62627]: DEBUG nova.compute.manager [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Start building networks asynchronously for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1196.310991] env[62627]: DEBUG oslo_concurrency.lockutils [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.198s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1196.311501] env[62627]: DEBUG nova.objects.instance [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Lazy-loading 'resources' on Instance uuid b282093d-8013-436d-94d2-bc26161456ea {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1196.312921] env[62627]: DEBUG oslo_concurrency.lockutils [req-a33636da-8de0-4003-9c27-d2bc09ed603f req-88623c28-bda6-4012-8ef7-d4ef4d1eb501 service nova] Releasing lock "refresh_cache-b282093d-8013-436d-94d2-bc26161456ea" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1196.322686] env[62627]: DEBUG oslo_vmware.api [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195169, 'name': ReconfigVM_Task, 'duration_secs': 0.422831} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.322969] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Reconfigured VM instance instance-0000003b to attach disk [datastore2] d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93/d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1196.323293] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Updating instance 'd2d4dd6b-d5c5-402b-b165-4c9ae1e36b93' progress to 50 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1196.354898] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195172, 'name': CreateVM_Task, 'duration_secs': 0.354303} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.355086] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 722d7bce-f0d7-4891-aa48-0ead10786a6b] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1196.355783] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1196.356133] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1196.356294] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1196.356553] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f151d09e-773f-4d30-a632-c6c479e9a2db {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.362781] env[62627]: DEBUG oslo_vmware.api [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Waiting for the task: (returnval){ [ 1196.362781] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5224cec2-80ad-2a6a-95cb-be73781b5b38" [ 1196.362781] env[62627]: _type = "Task" [ 1196.362781] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.370929] env[62627]: DEBUG oslo_vmware.api [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5224cec2-80ad-2a6a-95cb-be73781b5b38, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.429993] env[62627]: DEBUG nova.virt.hardware [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1196.430301] env[62627]: DEBUG nova.virt.hardware [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1196.430492] env[62627]: DEBUG nova.virt.hardware [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1196.430657] env[62627]: DEBUG nova.virt.hardware [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1196.431424] env[62627]: DEBUG nova.virt.hardware [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1196.431424] env[62627]: DEBUG nova.virt.hardware [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1196.431424] env[62627]: DEBUG nova.virt.hardware [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1196.431424] env[62627]: DEBUG nova.virt.hardware [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1196.431584] 
env[62627]: DEBUG nova.virt.hardware [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1196.431777] env[62627]: DEBUG nova.virt.hardware [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1196.431991] env[62627]: DEBUG nova.virt.hardware [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1196.432898] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f3bae9e-6451-473a-b6c2-a50ca0d5823e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.441075] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ffd2f0a-4570-4cef-86a8-90bf73fd5877 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.457996] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c2:63:41', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3f499bc9-78da-46c1-9274-19edf26d31cb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '291e9d98-ac72-43f7-b745-9f0d290580fa', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1196.465586] env[62627]: DEBUG oslo.service.loopingcall [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1196.469260] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1196.471853] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6eaea57e-6138-4203-b5e5-46513703c9a0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.493572] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1196.493572] env[62627]: value = "task-2195173" [ 1196.493572] env[62627]: _type = "Task" [ 1196.493572] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.502751] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195173, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.557538] env[62627]: DEBUG oslo_vmware.api [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195168, 'name': PowerOnVM_Task, 'duration_secs': 0.678441} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.557789] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1196.558034] env[62627]: INFO nova.compute.manager [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Took 7.63 seconds to spawn the instance on the hypervisor. [ 1196.558182] env[62627]: DEBUG nova.compute.manager [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1196.558959] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c7beedd-55ab-4e69-a24b-cbd83f5011e6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.696914] env[62627]: DEBUG nova.network.neutron [req-a7d1426e-69db-4caf-90b4-310727005522 req-f598b7ad-0cd2-4f3c-b891-d0e7148952bf service nova] [instance: 722d7bce-f0d7-4891-aa48-0ead10786a6b] Updated VIF entry in instance network info cache for port f1d904fd-a7a5-45af-a796-47dcf5bf2e4e. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1196.697329] env[62627]: DEBUG nova.network.neutron [req-a7d1426e-69db-4caf-90b4-310727005522 req-f598b7ad-0cd2-4f3c-b891-d0e7148952bf service nova] [instance: 722d7bce-f0d7-4891-aa48-0ead10786a6b] Updating instance_info_cache with network_info: [{"id": "f1d904fd-a7a5-45af-a796-47dcf5bf2e4e", "address": "fa:16:3e:ca:18:c9", "network": {"id": "7962cd8d-1d16-483a-9a80-0f3c9303a227", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1727604738-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "beed9c3722f6470e8a1ed44281fe57e5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4105fae7-3de1-4a6c-a44b-39097c174699", "external-id": "nsx-vlan-transportzone-773", "segmentation_id": 773, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1d904fd-a7", "ovs_interfaceid": "f1d904fd-a7a5-45af-a796-47dcf5bf2e4e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1196.709245] env[62627]: DEBUG nova.compute.manager [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1196.813127] env[62627]: DEBUG nova.compute.utils [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1196.815388] env[62627]: DEBUG nova.objects.instance [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Lazy-loading 'numa_topology' on Instance uuid b282093d-8013-436d-94d2-bc26161456ea {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1196.818026] env[62627]: DEBUG nova.compute.manager [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Allocating IP information in the background. 
{{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1196.818026] env[62627]: DEBUG nova.network.neutron [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1196.831031] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16d025ea-af3b-4989-ae12-3f0325203548 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.851909] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6233ca8c-85e0-4517-9db0-8dfd1cc10a7a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.872299] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Updating instance 'd2d4dd6b-d5c5-402b-b165-4c9ae1e36b93' progress to 67 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1196.883686] env[62627]: DEBUG oslo_vmware.api [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5224cec2-80ad-2a6a-95cb-be73781b5b38, 'name': SearchDatastore_Task, 'duration_secs': 0.010762} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.883686] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1196.883686] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] [instance: 722d7bce-f0d7-4891-aa48-0ead10786a6b] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1196.883899] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1196.884044] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1196.884227] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1196.884479] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-689f0ed9-9e36-4b75-a9f4-66f56b853564 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.891501] env[62627]: DEBUG nova.policy [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e72f59cc4a02460088e86ad29af1e0ff', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '97c11d791dcf4f94a8e8bda7a6f014cd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 1196.898159] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1196.898346] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1196.899086] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b318772f-48de-44a4-a213-9b27e4fc8aee {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.905226] env[62627]: DEBUG oslo_vmware.api [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Waiting for the task: (returnval){ [ 1196.905226] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52f4a262-dcc0-550e-c988-33016ccc4daf" [ 1196.905226] env[62627]: _type = "Task" [ 1196.905226] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.913140] env[62627]: DEBUG oslo_vmware.api [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52f4a262-dcc0-550e-c988-33016ccc4daf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.936070] env[62627]: DEBUG nova.network.neutron [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 30db4963-095b-4297-ad7c-0606451bd2b6] Successfully updated port: c9e055e0-0774-4741-a16c-139233d6803d {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1196.988418] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7061ffc6-121c-40f8-b94f-c1227b20e94d tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Acquiring lock "367a83b9-79f8-43b2-91dd-40d13193fa40" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1196.988811] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7061ffc6-121c-40f8-b94f-c1227b20e94d tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Lock "367a83b9-79f8-43b2-91dd-40d13193fa40" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1196.988924] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7061ffc6-121c-40f8-b94f-c1227b20e94d tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Acquiring lock "367a83b9-79f8-43b2-91dd-40d13193fa40-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1196.989583] 
env[62627]: DEBUG oslo_concurrency.lockutils [None req-7061ffc6-121c-40f8-b94f-c1227b20e94d tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Lock "367a83b9-79f8-43b2-91dd-40d13193fa40-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1196.989761] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7061ffc6-121c-40f8-b94f-c1227b20e94d tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Lock "367a83b9-79f8-43b2-91dd-40d13193fa40-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1196.992069] env[62627]: INFO nova.compute.manager [None req-7061ffc6-121c-40f8-b94f-c1227b20e94d tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: 367a83b9-79f8-43b2-91dd-40d13193fa40] Terminating instance [ 1197.004100] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195173, 'name': CreateVM_Task, 'duration_secs': 0.365304} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.004811] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1197.005512] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1197.005663] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1197.006010] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1197.006516] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0a7ace4-171a-419f-9565-ca1483e6af9f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.011985] env[62627]: DEBUG oslo_vmware.api [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1197.011985] env[62627]: value = 
"session[520bf719-dc86-5bda-f3a2-48a743b520b7]529dcbc5-5958-2c62-a63e-a716d6d02d1f" [ 1197.011985] env[62627]: _type = "Task" [ 1197.011985] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1197.021185] env[62627]: DEBUG oslo_vmware.api [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]529dcbc5-5958-2c62-a63e-a716d6d02d1f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.074820] env[62627]: INFO nova.compute.manager [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Took 12.56 seconds to build instance. [ 1197.201122] env[62627]: DEBUG oslo_concurrency.lockutils [req-a7d1426e-69db-4caf-90b4-310727005522 req-f598b7ad-0cd2-4f3c-b891-d0e7148952bf service nova] Releasing lock "refresh_cache-722d7bce-f0d7-4891-aa48-0ead10786a6b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1197.201122] env[62627]: DEBUG nova.compute.manager [req-a7d1426e-69db-4caf-90b4-310727005522 req-f598b7ad-0cd2-4f3c-b891-d0e7148952bf service nova] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Received event network-changed-58815871-fdac-4406-a61e-4d7fca125122 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1197.201122] env[62627]: DEBUG nova.compute.manager [req-a7d1426e-69db-4caf-90b4-310727005522 req-f598b7ad-0cd2-4f3c-b891-d0e7148952bf service nova] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Refreshing instance network info cache due to event network-changed-58815871-fdac-4406-a61e-4d7fca125122. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1197.201122] env[62627]: DEBUG oslo_concurrency.lockutils [req-a7d1426e-69db-4caf-90b4-310727005522 req-f598b7ad-0cd2-4f3c-b891-d0e7148952bf service nova] Acquiring lock "refresh_cache-c7e3246d-ffe7-4d81-a2ec-c5878649ed6b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1197.201122] env[62627]: DEBUG oslo_concurrency.lockutils [req-a7d1426e-69db-4caf-90b4-310727005522 req-f598b7ad-0cd2-4f3c-b891-d0e7148952bf service nova] Acquired lock "refresh_cache-c7e3246d-ffe7-4d81-a2ec-c5878649ed6b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1197.201122] env[62627]: DEBUG nova.network.neutron [req-a7d1426e-69db-4caf-90b4-310727005522 req-f598b7ad-0cd2-4f3c-b891-d0e7148952bf service nova] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Refreshing network info cache for port 58815871-fdac-4406-a61e-4d7fca125122 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1197.229622] env[62627]: DEBUG nova.network.neutron [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Successfully created port: 9a79e592-1487-4b96-93f2-66df8b0224a4 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1197.233013] env[62627]: DEBUG oslo_concurrency.lockutils [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1197.320825] env[62627]: DEBUG nova.compute.manager [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1197.323619] env[62627]: DEBUG nova.objects.base [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=62627) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1197.419933] env[62627]: DEBUG oslo_vmware.api [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52f4a262-dcc0-550e-c988-33016ccc4daf, 'name': SearchDatastore_Task, 'duration_secs': 0.025872} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.420763] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-449d1be5-791a-4b8f-a648-020ac52d7990 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.426365] env[62627]: DEBUG oslo_vmware.api [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Waiting for the task: (returnval){ [ 1197.426365] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]526186e3-01a2-74af-9f4f-d6919cc9b003" [ 1197.426365] env[62627]: _type = "Task" [ 1197.426365] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1197.434071] env[62627]: DEBUG nova.network.neutron [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Port 332cfd7e-9934-429a-bc74-275d48f07fdb binding to destination host cpu-1 is already ACTIVE {{(pid=62627) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1197.446523] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Acquiring lock "refresh_cache-30db4963-095b-4297-ad7c-0606451bd2b6" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1197.446523] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Acquired lock "refresh_cache-30db4963-095b-4297-ad7c-0606451bd2b6" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1197.446523] env[62627]: DEBUG nova.network.neutron [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 30db4963-095b-4297-ad7c-0606451bd2b6] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1197.447063] env[62627]: DEBUG oslo_vmware.api [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]526186e3-01a2-74af-9f4f-d6919cc9b003, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.498246] env[62627]: DEBUG nova.compute.manager [None req-7061ffc6-121c-40f8-b94f-c1227b20e94d tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: 367a83b9-79f8-43b2-91dd-40d13193fa40] Start destroying the instance on the hypervisor. 
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1197.498528] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7061ffc6-121c-40f8-b94f-c1227b20e94d tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: 367a83b9-79f8-43b2-91dd-40d13193fa40] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1197.499430] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28c7519f-ae61-41b8-8ae0-4b1a4dc8d5dd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.507917] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-7061ffc6-121c-40f8-b94f-c1227b20e94d tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: 367a83b9-79f8-43b2-91dd-40d13193fa40] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1197.510421] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-857cb4ed-72bb-416d-83fb-fa55b4124128 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.517738] env[62627]: DEBUG oslo_vmware.api [None req-7061ffc6-121c-40f8-b94f-c1227b20e94d tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Waiting for the task: (returnval){ [ 1197.517738] env[62627]: value = "task-2195174" [ 1197.517738] env[62627]: _type = "Task" [ 1197.517738] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1197.521380] env[62627]: DEBUG oslo_vmware.api [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]529dcbc5-5958-2c62-a63e-a716d6d02d1f, 'name': SearchDatastore_Task, 'duration_secs': 0.019972} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.526299] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1197.526569] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1197.526735] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1197.532436] env[62627]: DEBUG oslo_vmware.api [None req-7061ffc6-121c-40f8-b94f-c1227b20e94d tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Task: {'id': task-2195174, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.565419] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-605fe51b-29c0-4ad2-8b9f-f9aa8c831cea {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.573353] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01c0619f-0f7a-4d21-8432-6a211998c339 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.576560] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a9fc708b-207f-46ff-a70c-7195be176eb5 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "36a8d4d3-d6b6-4b42-9514-0e29ee45aea9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.070s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1197.605115] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a567584-e65b-45a0-b02d-0b4891e7e81d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.612857] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7e61272-1775-4653-bc51-0dc4f19d8575 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.627147] env[62627]: DEBUG nova.compute.provider_tree [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Inventory has not changed 
in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1197.951465] env[62627]: DEBUG oslo_vmware.api [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]526186e3-01a2-74af-9f4f-d6919cc9b003, 'name': SearchDatastore_Task, 'duration_secs': 0.017726} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.952840] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1197.953102] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 722d7bce-f0d7-4891-aa48-0ead10786a6b/722d7bce-f0d7-4891-aa48-0ead10786a6b.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1197.953401] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1197.953606] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1197.953831] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-86138dc6-720f-4cb3-8eb7-bdf49e2728bb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.957431] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d2f5b24a-fc85-4991-8cb2-8ac137df8856 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.965372] env[62627]: DEBUG oslo_vmware.api [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Waiting for the task: (returnval){ [ 1197.965372] env[62627]: value = "task-2195175" [ 1197.965372] env[62627]: _type = "Task" [ 1197.965372] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1197.969329] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1197.969434] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1197.970444] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0205d849-774e-4c91-9c52-a4e2f7800bb6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.975989] env[62627]: DEBUG oslo_vmware.api [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Task: {'id': task-2195175, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.979661] env[62627]: DEBUG oslo_vmware.api [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1197.979661] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]525e2d7d-c3ca-3b39-2976-2b96dde2cf0f" [ 1197.979661] env[62627]: _type = "Task" [ 1197.979661] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1197.988432] env[62627]: DEBUG oslo_vmware.api [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]525e2d7d-c3ca-3b39-2976-2b96dde2cf0f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.000651] env[62627]: DEBUG nova.compute.manager [req-3bd536eb-2f1a-43a7-b5be-49a0a8b16c6e req-498914ea-53d2-46ad-af1c-e889c7e68717 service nova] [instance: 30db4963-095b-4297-ad7c-0606451bd2b6] Received event network-vif-plugged-c9e055e0-0774-4741-a16c-139233d6803d {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1198.000876] env[62627]: DEBUG oslo_concurrency.lockutils [req-3bd536eb-2f1a-43a7-b5be-49a0a8b16c6e req-498914ea-53d2-46ad-af1c-e889c7e68717 service nova] Acquiring lock "30db4963-095b-4297-ad7c-0606451bd2b6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1198.001198] env[62627]: DEBUG oslo_concurrency.lockutils [req-3bd536eb-2f1a-43a7-b5be-49a0a8b16c6e req-498914ea-53d2-46ad-af1c-e889c7e68717 service nova] Lock "30db4963-095b-4297-ad7c-0606451bd2b6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1198.001543] env[62627]: DEBUG oslo_concurrency.lockutils [req-3bd536eb-2f1a-43a7-b5be-49a0a8b16c6e req-498914ea-53d2-46ad-af1c-e889c7e68717 service nova] Lock "30db4963-095b-4297-ad7c-0606451bd2b6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1198.001823] env[62627]: DEBUG nova.compute.manager [req-3bd536eb-2f1a-43a7-b5be-49a0a8b16c6e req-498914ea-53d2-46ad-af1c-e889c7e68717 service nova] [instance: 30db4963-095b-4297-ad7c-0606451bd2b6] No waiting events found dispatching network-vif-plugged-c9e055e0-0774-4741-a16c-139233d6803d {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1198.002082] env[62627]: WARNING nova.compute.manager [req-3bd536eb-2f1a-43a7-b5be-49a0a8b16c6e req-498914ea-53d2-46ad-af1c-e889c7e68717 service nova] [instance: 30db4963-095b-4297-ad7c-0606451bd2b6] Received unexpected event network-vif-plugged-c9e055e0-0774-4741-a16c-139233d6803d for instance with vm_state building and task_state spawning. [ 1198.002351] env[62627]: DEBUG nova.compute.manager [req-3bd536eb-2f1a-43a7-b5be-49a0a8b16c6e req-498914ea-53d2-46ad-af1c-e889c7e68717 service nova] [instance: 30db4963-095b-4297-ad7c-0606451bd2b6] Received event network-changed-c9e055e0-0774-4741-a16c-139233d6803d {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1198.002589] env[62627]: DEBUG nova.compute.manager [req-3bd536eb-2f1a-43a7-b5be-49a0a8b16c6e req-498914ea-53d2-46ad-af1c-e889c7e68717 service nova] [instance: 30db4963-095b-4297-ad7c-0606451bd2b6] Refreshing instance network info cache due to event network-changed-c9e055e0-0774-4741-a16c-139233d6803d. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1198.002845] env[62627]: DEBUG oslo_concurrency.lockutils [req-3bd536eb-2f1a-43a7-b5be-49a0a8b16c6e req-498914ea-53d2-46ad-af1c-e889c7e68717 service nova] Acquiring lock "refresh_cache-30db4963-095b-4297-ad7c-0606451bd2b6" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1198.011790] env[62627]: DEBUG nova.network.neutron [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 30db4963-095b-4297-ad7c-0606451bd2b6] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1198.033723] env[62627]: DEBUG oslo_vmware.api [None req-7061ffc6-121c-40f8-b94f-c1227b20e94d tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Task: {'id': task-2195174, 'name': PowerOffVM_Task, 'duration_secs': 0.248532} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1198.036782] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-7061ffc6-121c-40f8-b94f-c1227b20e94d tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: 367a83b9-79f8-43b2-91dd-40d13193fa40] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1198.037012] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7061ffc6-121c-40f8-b94f-c1227b20e94d tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: 367a83b9-79f8-43b2-91dd-40d13193fa40] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1198.037313] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2d551683-af9e-43dc-ac18-8fc3e3bf6cdf {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.040710] env[62627]: DEBUG nova.network.neutron [req-a7d1426e-69db-4caf-90b4-310727005522 req-f598b7ad-0cd2-4f3c-b891-d0e7148952bf service nova] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Updated VIF entry in instance network info cache for port 58815871-fdac-4406-a61e-4d7fca125122. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1198.041177] env[62627]: DEBUG nova.network.neutron [req-a7d1426e-69db-4caf-90b4-310727005522 req-f598b7ad-0cd2-4f3c-b891-d0e7148952bf service nova] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Updating instance_info_cache with network_info: [{"id": "58815871-fdac-4406-a61e-4d7fca125122", "address": "fa:16:3e:69:fc:43", "network": {"id": "a1c5c227-8f1f-4881-a6d8-f446b64d9846", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1052922661-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.196", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e4f7ad3ebcf4c4d97bdbce58eec5a09", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "023d6500-887e-4dc4-bec5-06b40450d9c0", "external-id": "nsx-vlan-transportzone-108", "segmentation_id": 108, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58815871-fd", "ovs_interfaceid": "58815871-fdac-4406-a61e-4d7fca125122", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1198.102283] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7061ffc6-121c-40f8-b94f-c1227b20e94d tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: 367a83b9-79f8-43b2-91dd-40d13193fa40] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1198.102513] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7061ffc6-121c-40f8-b94f-c1227b20e94d tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: 367a83b9-79f8-43b2-91dd-40d13193fa40] Deleting contents of the VM from datastore datastore1 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1198.102663] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-7061ffc6-121c-40f8-b94f-c1227b20e94d tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Deleting the datastore file [datastore1] 367a83b9-79f8-43b2-91dd-40d13193fa40 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1198.102898] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-861a62ca-e70d-44c4-b2cf-26a4caa94b7b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.109151] env[62627]: DEBUG oslo_vmware.api [None req-7061ffc6-121c-40f8-b94f-c1227b20e94d tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Waiting for the task: (returnval){ [ 1198.109151] env[62627]: value = "task-2195177" [ 1198.109151] env[62627]: _type = "Task" [ 1198.109151] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.117399] env[62627]: DEBUG oslo_vmware.api [None req-7061ffc6-121c-40f8-b94f-c1227b20e94d tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Task: {'id': task-2195177, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.133041] env[62627]: DEBUG nova.scheduler.client.report [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1198.242161] env[62627]: DEBUG nova.network.neutron [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 30db4963-095b-4297-ad7c-0606451bd2b6] Updating instance_info_cache with network_info: [{"id": "c9e055e0-0774-4741-a16c-139233d6803d", "address": "fa:16:3e:5d:44:b9", "network": {"id": "76eae841-5dda-4384-92ba-55247808e11f", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1004379592-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f41b0c171180469e9092b8a8da697a78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc9e055e0-07", "ovs_interfaceid": "c9e055e0-0774-4741-a16c-139233d6803d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1198.336179] env[62627]: DEBUG nova.compute.manager [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Start spawning the instance on the hypervisor. 
{{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1198.366656] env[62627]: DEBUG nova.virt.hardware [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1198.366927] env[62627]: DEBUG nova.virt.hardware [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1198.367135] env[62627]: DEBUG nova.virt.hardware [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1198.367344] env[62627]: DEBUG nova.virt.hardware [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1198.367509] env[62627]: DEBUG nova.virt.hardware [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1198.367710] env[62627]: DEBUG nova.virt.hardware [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1198.367953] env[62627]: DEBUG nova.virt.hardware [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1198.368166] env[62627]: DEBUG nova.virt.hardware [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1198.368358] env[62627]: DEBUG nova.virt.hardware [None 
req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1198.368584] env[62627]: DEBUG nova.virt.hardware [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1198.368769] env[62627]: DEBUG nova.virt.hardware [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1198.369735] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8da2b284-421c-43eb-b84c-2a6dad6d22ec {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.380668] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3968fcd1-3f64-4e42-a15c-d6d6ac468fc7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.473509] env[62627]: DEBUG oslo_concurrency.lockutils [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquiring lock "d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1198.473691] env[62627]: DEBUG oslo_concurrency.lockutils [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lock "d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1198.474157] env[62627]: DEBUG oslo_concurrency.lockutils [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lock "d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1198.481927] env[62627]: DEBUG oslo_vmware.api [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Task: {'id': task-2195175, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.492253] env[62627]: DEBUG oslo_vmware.api [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]525e2d7d-c3ca-3b39-2976-2b96dde2cf0f, 'name': SearchDatastore_Task, 'duration_secs': 0.010055} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1198.493043] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-19c47a2f-5ab5-4258-a6ec-1cde92a80267 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.498272] env[62627]: DEBUG oslo_vmware.api [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1198.498272] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d0cfd9-8f68-faf3-5b98-c76be44dc2c5" [ 1198.498272] env[62627]: _type = "Task" [ 1198.498272] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.507570] env[62627]: DEBUG oslo_vmware.api [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d0cfd9-8f68-faf3-5b98-c76be44dc2c5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.544860] env[62627]: DEBUG oslo_concurrency.lockutils [req-a7d1426e-69db-4caf-90b4-310727005522 req-f598b7ad-0cd2-4f3c-b891-d0e7148952bf service nova] Releasing lock "refresh_cache-c7e3246d-ffe7-4d81-a2ec-c5878649ed6b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1198.618993] env[62627]: DEBUG oslo_vmware.api [None req-7061ffc6-121c-40f8-b94f-c1227b20e94d tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Task: {'id': task-2195177, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.410799} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1198.619267] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-7061ffc6-121c-40f8-b94f-c1227b20e94d tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1198.619455] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7061ffc6-121c-40f8-b94f-c1227b20e94d tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: 367a83b9-79f8-43b2-91dd-40d13193fa40] Deleted contents of the VM from datastore datastore1 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1198.619639] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7061ffc6-121c-40f8-b94f-c1227b20e94d tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: 367a83b9-79f8-43b2-91dd-40d13193fa40] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1198.619814] env[62627]: INFO nova.compute.manager [None req-7061ffc6-121c-40f8-b94f-c1227b20e94d tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] [instance: 367a83b9-79f8-43b2-91dd-40d13193fa40] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1198.620061] env[62627]: DEBUG oslo.service.loopingcall [None req-7061ffc6-121c-40f8-b94f-c1227b20e94d tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1198.620257] env[62627]: DEBUG nova.compute.manager [-] [instance: 367a83b9-79f8-43b2-91dd-40d13193fa40] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1198.620353] env[62627]: DEBUG nova.network.neutron [-] [instance: 367a83b9-79f8-43b2-91dd-40d13193fa40] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1198.640375] env[62627]: DEBUG oslo_concurrency.lockutils [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.329s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1198.642716] env[62627]: DEBUG oslo_concurrency.lockutils [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.410s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1198.644237] env[62627]: INFO nova.compute.claims [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1198.706755] env[62627]: DEBUG nova.compute.manager [req-7604799a-cd59-418b-9405-c9df65aa4f0c req-3d962922-b784-44f6-9c08-d066a529067f service nova] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Received event network-vif-plugged-9a79e592-1487-4b96-93f2-66df8b0224a4 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1198.706755] env[62627]: DEBUG oslo_concurrency.lockutils [req-7604799a-cd59-418b-9405-c9df65aa4f0c req-3d962922-b784-44f6-9c08-d066a529067f service nova] Acquiring lock "b68042fd-4955-41e4-b14f-1f8268ace377-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1198.706755] env[62627]: DEBUG oslo_concurrency.lockutils [req-7604799a-cd59-418b-9405-c9df65aa4f0c req-3d962922-b784-44f6-9c08-d066a529067f service nova] Lock "b68042fd-4955-41e4-b14f-1f8268ace377-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1198.706755] env[62627]: DEBUG oslo_concurrency.lockutils [req-7604799a-cd59-418b-9405-c9df65aa4f0c req-3d962922-b784-44f6-9c08-d066a529067f service nova] Lock "b68042fd-4955-41e4-b14f-1f8268ace377-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1198.706882] env[62627]: DEBUG nova.compute.manager [req-7604799a-cd59-418b-9405-c9df65aa4f0c req-3d962922-b784-44f6-9c08-d066a529067f service nova] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] No waiting events found dispatching 
network-vif-plugged-9a79e592-1487-4b96-93f2-66df8b0224a4 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1198.707470] env[62627]: WARNING nova.compute.manager [req-7604799a-cd59-418b-9405-c9df65aa4f0c req-3d962922-b784-44f6-9c08-d066a529067f service nova] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Received unexpected event network-vif-plugged-9a79e592-1487-4b96-93f2-66df8b0224a4 for instance with vm_state building and task_state spawning. [ 1198.745644] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Releasing lock "refresh_cache-30db4963-095b-4297-ad7c-0606451bd2b6" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1198.745644] env[62627]: DEBUG nova.compute.manager [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 30db4963-095b-4297-ad7c-0606451bd2b6] Instance network_info: |[{"id": "c9e055e0-0774-4741-a16c-139233d6803d", "address": "fa:16:3e:5d:44:b9", "network": {"id": "76eae841-5dda-4384-92ba-55247808e11f", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1004379592-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f41b0c171180469e9092b8a8da697a78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc9e055e0-07", "ovs_interfaceid": "c9e055e0-0774-4741-a16c-139233d6803d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1198.745644] env[62627]: DEBUG oslo_concurrency.lockutils [req-3bd536eb-2f1a-43a7-b5be-49a0a8b16c6e req-498914ea-53d2-46ad-af1c-e889c7e68717 service nova] Acquired lock "refresh_cache-30db4963-095b-4297-ad7c-0606451bd2b6" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1198.745644] env[62627]: DEBUG nova.network.neutron [req-3bd536eb-2f1a-43a7-b5be-49a0a8b16c6e req-498914ea-53d2-46ad-af1c-e889c7e68717 service nova] [instance: 30db4963-095b-4297-ad7c-0606451bd2b6] Refreshing network info cache for port c9e055e0-0774-4741-a16c-139233d6803d {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1198.746763] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 30db4963-095b-4297-ad7c-0606451bd2b6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5d:44:b9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dc7aa55d-223a-4157-9137-88dc492f2db2', 'network-type': 
'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c9e055e0-0774-4741-a16c-139233d6803d', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1198.760811] env[62627]: DEBUG oslo.service.loopingcall [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1198.762442] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 30db4963-095b-4297-ad7c-0606451bd2b6] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1198.762862] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-51f829cb-fc97-4217-ae81-518ed6cb9093 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.790733] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1198.790733] env[62627]: value = "task-2195178" [ 1198.790733] env[62627]: _type = "Task" [ 1198.790733] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.799580] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195178, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.981314] env[62627]: DEBUG oslo_vmware.api [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Task: {'id': task-2195175, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.54478} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1198.982221] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 722d7bce-f0d7-4891-aa48-0ead10786a6b/722d7bce-f0d7-4891-aa48-0ead10786a6b.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1198.982511] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] [instance: 722d7bce-f0d7-4891-aa48-0ead10786a6b] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1198.982805] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a3bbc9a3-bc2f-4397-be53-1ffbe8e79690 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.990654] env[62627]: DEBUG oslo_vmware.api [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Waiting for the task: (returnval){ [ 1198.990654] env[62627]: value = "task-2195179" [ 1198.990654] env[62627]: _type = "Task" [ 1198.990654] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.999816] env[62627]: DEBUG oslo_vmware.api [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Task: {'id': task-2195179, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.009283] env[62627]: DEBUG oslo_vmware.api [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d0cfd9-8f68-faf3-5b98-c76be44dc2c5, 'name': SearchDatastore_Task, 'duration_secs': 0.021216} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1199.009659] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1199.009949] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1/f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1199.010340] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d85a1043-06a1-4eb7-9508-6aa3e95c84d0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.017240] env[62627]: DEBUG oslo_vmware.api [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1199.017240] env[62627]: value = "task-2195180" [ 1199.017240] env[62627]: _type = "Task" [ 1199.017240] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1199.026792] env[62627]: DEBUG oslo_vmware.api [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195180, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.073374] env[62627]: DEBUG nova.compute.manager [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Stashing vm_state: active {{(pid=62627) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1199.152790] env[62627]: DEBUG oslo_concurrency.lockutils [None req-98851d85-a1b4-4da9-a390-3aa754ecc5a3 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Lock "b282093d-8013-436d-94d2-bc26161456ea" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 25.283s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1199.156562] env[62627]: DEBUG oslo_concurrency.lockutils [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Lock "b282093d-8013-436d-94d2-bc26161456ea" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 2.866s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1199.156562] env[62627]: INFO nova.compute.manager [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Unshelving [ 1199.305477] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195178, 'name': CreateVM_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.306304] env[62627]: DEBUG nova.network.neutron [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Successfully updated port: 9a79e592-1487-4b96-93f2-66df8b0224a4 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1199.331796] env[62627]: DEBUG nova.compute.manager [req-2772cb1d-48c8-433e-841e-bfb10d165c8c req-74807734-8e1e-4e36-a895-9898d56ae79f service nova] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Received event network-changed-9a79e592-1487-4b96-93f2-66df8b0224a4 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1199.332137] env[62627]: DEBUG nova.compute.manager [req-2772cb1d-48c8-433e-841e-bfb10d165c8c req-74807734-8e1e-4e36-a895-9898d56ae79f service nova] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Refreshing instance network info cache due to event network-changed-9a79e592-1487-4b96-93f2-66df8b0224a4. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1199.332311] env[62627]: DEBUG oslo_concurrency.lockutils [req-2772cb1d-48c8-433e-841e-bfb10d165c8c req-74807734-8e1e-4e36-a895-9898d56ae79f service nova] Acquiring lock "refresh_cache-b68042fd-4955-41e4-b14f-1f8268ace377" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1199.332418] env[62627]: DEBUG oslo_concurrency.lockutils [req-2772cb1d-48c8-433e-841e-bfb10d165c8c req-74807734-8e1e-4e36-a895-9898d56ae79f service nova] Acquired lock "refresh_cache-b68042fd-4955-41e4-b14f-1f8268ace377" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1199.332630] env[62627]: DEBUG nova.network.neutron [req-2772cb1d-48c8-433e-841e-bfb10d165c8c req-74807734-8e1e-4e36-a895-9898d56ae79f service nova] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Refreshing network info cache for port 9a79e592-1487-4b96-93f2-66df8b0224a4 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1199.479098] env[62627]: DEBUG nova.network.neutron [req-3bd536eb-2f1a-43a7-b5be-49a0a8b16c6e req-498914ea-53d2-46ad-af1c-e889c7e68717 service nova] [instance: 30db4963-095b-4297-ad7c-0606451bd2b6] Updated VIF entry in instance network info cache for port c9e055e0-0774-4741-a16c-139233d6803d. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1199.479513] env[62627]: DEBUG nova.network.neutron [req-3bd536eb-2f1a-43a7-b5be-49a0a8b16c6e req-498914ea-53d2-46ad-af1c-e889c7e68717 service nova] [instance: 30db4963-095b-4297-ad7c-0606451bd2b6] Updating instance_info_cache with network_info: [{"id": "c9e055e0-0774-4741-a16c-139233d6803d", "address": "fa:16:3e:5d:44:b9", "network": {"id": "76eae841-5dda-4384-92ba-55247808e11f", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1004379592-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f41b0c171180469e9092b8a8da697a78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc9e055e0-07", "ovs_interfaceid": "c9e055e0-0774-4741-a16c-139233d6803d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1199.501061] env[62627]: DEBUG oslo_vmware.api [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Task: {'id': task-2195179, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.096841} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1199.501398] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] [instance: 722d7bce-f0d7-4891-aa48-0ead10786a6b] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1199.502251] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac338a6e-c57c-4a4a-a09e-eaf5a9e6be86 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.527698] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] [instance: 722d7bce-f0d7-4891-aa48-0ead10786a6b] Reconfiguring VM instance instance-00000061 to attach disk [datastore2] 722d7bce-f0d7-4891-aa48-0ead10786a6b/722d7bce-f0d7-4891-aa48-0ead10786a6b.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1199.528197] env[62627]: DEBUG oslo_concurrency.lockutils [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquiring lock "refresh_cache-d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1199.528315] env[62627]: DEBUG oslo_concurrency.lockutils [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquired lock "refresh_cache-d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1199.528493] env[62627]: DEBUG nova.network.neutron [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1199.529801] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cdaa7d1b-276f-46b6-8d09-6f0c105f5910 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.556613] env[62627]: DEBUG oslo_vmware.api [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195180, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.558107] env[62627]: DEBUG oslo_vmware.api [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Waiting for the task: (returnval){ [ 1199.558107] env[62627]: value = "task-2195181" [ 1199.558107] env[62627]: _type = "Task" [ 1199.558107] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1199.568262] env[62627]: DEBUG oslo_vmware.api [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Task: {'id': task-2195181, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.596507] env[62627]: DEBUG oslo_concurrency.lockutils [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1199.717198] env[62627]: DEBUG nova.network.neutron [-] [instance: 367a83b9-79f8-43b2-91dd-40d13193fa40] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1199.806462] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195178, 'name': CreateVM_Task, 'duration_secs': 0.546214} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1199.806874] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 30db4963-095b-4297-ad7c-0606451bd2b6] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1199.807622] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1199.807785] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1199.808124] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1199.808643] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquiring lock "refresh_cache-b68042fd-4955-41e4-b14f-1f8268ace377" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1199.808841] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5fe92d87-43b3-4866-815d-3cfc090cbf07 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.813735] env[62627]: DEBUG 
oslo_vmware.api [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Waiting for the task: (returnval){ [ 1199.813735] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52ce2aa4-25cc-3bdc-30cd-193ad8418a19" [ 1199.813735] env[62627]: _type = "Task" [ 1199.813735] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1199.824140] env[62627]: DEBUG oslo_vmware.api [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52ce2aa4-25cc-3bdc-30cd-193ad8418a19, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.865007] env[62627]: DEBUG nova.network.neutron [req-2772cb1d-48c8-433e-841e-bfb10d165c8c req-74807734-8e1e-4e36-a895-9898d56ae79f service nova] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1199.887496] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1c35b9d-41f6-4fab-bff7-13c9f9262056 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.895873] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46e5bf4a-d71a-4799-bb7c-17ba7e93ff85 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.929111] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-559bcbf0-8bfa-4a79-8688-9fee752f95b3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.936832] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b05689c-b4ce-4183-b8de-94e2c5642008 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.951390] env[62627]: DEBUG nova.compute.provider_tree [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1199.973958] env[62627]: DEBUG nova.network.neutron [req-2772cb1d-48c8-433e-841e-bfb10d165c8c req-74807734-8e1e-4e36-a895-9898d56ae79f service nova] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1199.982519] env[62627]: DEBUG oslo_concurrency.lockutils [req-3bd536eb-2f1a-43a7-b5be-49a0a8b16c6e req-498914ea-53d2-46ad-af1c-e889c7e68717 service nova] Releasing lock "refresh_cache-30db4963-095b-4297-ad7c-0606451bd2b6" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1200.030964] env[62627]: DEBUG nova.compute.manager [req-2283555f-5083-487f-b68b-7a6fc0bf1dd6 
req-5ad601da-fbdb-4a16-867b-25cf8a21276f service nova] [instance: 367a83b9-79f8-43b2-91dd-40d13193fa40] Received event network-vif-deleted-4534e6a9-471a-4703-aa7a-7aac3d20292f {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1200.042717] env[62627]: DEBUG oslo_vmware.api [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195180, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.817698} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1200.043165] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1/f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1200.043493] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1200.043669] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-238a5c75-3b46-4a7c-9458-7fe0749b0b4a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.053098] env[62627]: DEBUG oslo_vmware.api [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1200.053098] env[62627]: value = "task-2195182" [ 1200.053098] env[62627]: _type = "Task" [ 1200.053098] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1200.061393] env[62627]: DEBUG oslo_vmware.api [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195182, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.070271] env[62627]: DEBUG oslo_vmware.api [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Task: {'id': task-2195181, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.180147] env[62627]: DEBUG oslo_concurrency.lockutils [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1200.219659] env[62627]: INFO nova.compute.manager [-] [instance: 367a83b9-79f8-43b2-91dd-40d13193fa40] Took 1.60 seconds to deallocate network for instance. [ 1200.324595] env[62627]: DEBUG oslo_vmware.api [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52ce2aa4-25cc-3bdc-30cd-193ad8418a19, 'name': SearchDatastore_Task, 'duration_secs': 0.010202} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1200.324912] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1200.325166] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 30db4963-095b-4297-ad7c-0606451bd2b6] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1200.325406] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1200.325551] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1200.325740] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1200.326013] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c6a38b43-e361-42c4-8cb5-c87930fc7f6f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.334822] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1200.335018] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1200.335736] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7985ca02-a535-4a03-b7d4-e14cb44b30bd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.340485] env[62627]: DEBUG oslo_vmware.api [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Waiting for the task: (returnval){ [ 1200.340485] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52832725-65de-4817-fc0a-706b2309d2a5" [ 1200.340485] env[62627]: _type = "Task" [ 1200.340485] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1200.347969] env[62627]: DEBUG oslo_vmware.api [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52832725-65de-4817-fc0a-706b2309d2a5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.454953] env[62627]: DEBUG nova.scheduler.client.report [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1200.476423] env[62627]: DEBUG oslo_concurrency.lockutils [req-2772cb1d-48c8-433e-841e-bfb10d165c8c req-74807734-8e1e-4e36-a895-9898d56ae79f service nova] Releasing lock "refresh_cache-b68042fd-4955-41e4-b14f-1f8268ace377" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1200.477489] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquired lock "refresh_cache-b68042fd-4955-41e4-b14f-1f8268ace377" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1200.478577] env[62627]: DEBUG nova.network.neutron [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1200.563954] env[62627]: DEBUG oslo_vmware.api [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195182, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065787} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1200.566890] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1200.567946] env[62627]: DEBUG nova.network.neutron [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Updating instance_info_cache with network_info: [{"id": "332cfd7e-9934-429a-bc74-275d48f07fdb", "address": "fa:16:3e:68:4c:72", "network": {"id": "e5d8177b-3afb-46cb-88d9-d4777ffa4a39", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1184692155-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.160", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f3a36c557145e38c528a628b1f3b67", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a44713-0af1-486e-bc0d-00e03a769fa4", "external-id": "nsx-vlan-transportzone-420", "segmentation_id": 420, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap332cfd7e-99", "ovs_interfaceid": "332cfd7e-9934-429a-bc74-275d48f07fdb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1200.571106] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b29dd78-e4a7-4a23-ae78-6fe255f845b2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.580715] env[62627]: DEBUG oslo_vmware.api [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Task: {'id': task-2195181, 'name': ReconfigVM_Task, 'duration_secs': 0.763419} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1200.589182] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] [instance: 722d7bce-f0d7-4891-aa48-0ead10786a6b] Reconfigured VM instance instance-00000061 to attach disk [datastore2] 722d7bce-f0d7-4891-aa48-0ead10786a6b/722d7bce-f0d7-4891-aa48-0ead10786a6b.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1200.598160] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Reconfiguring VM instance instance-00000056 to attach disk [datastore2] f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1/f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1200.598456] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-49631031-f2fa-4959-be8c-28821a80896e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.600222] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1c3d054b-3c22-43e7-9b01-94d4e22e9482 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.620773] env[62627]: DEBUG oslo_vmware.api [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Waiting for the task: (returnval){ [ 1200.620773] env[62627]: value = "task-2195183" [ 1200.620773] env[62627]: _type = "Task" [ 1200.620773] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1200.621946] env[62627]: DEBUG oslo_vmware.api [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1200.621946] env[62627]: value = "task-2195184" [ 1200.621946] env[62627]: _type = "Task" [ 1200.621946] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1200.632575] env[62627]: DEBUG oslo_vmware.api [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Task: {'id': task-2195183, 'name': Rename_Task} progress is 6%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.726231] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7061ffc6-121c-40f8-b94f-c1227b20e94d tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1200.850894] env[62627]: DEBUG oslo_vmware.api [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52832725-65de-4817-fc0a-706b2309d2a5, 'name': SearchDatastore_Task, 'duration_secs': 0.01102} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1200.852083] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3fcf33e2-a3a7-4051-9a6c-b19cb8f509af {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.856653] env[62627]: DEBUG oslo_vmware.api [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Waiting for the task: (returnval){ [ 1200.856653] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5225048d-91d8-aaea-6ee7-0c46c490cf4d" [ 1200.856653] env[62627]: _type = "Task" [ 1200.856653] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1200.864582] env[62627]: DEBUG oslo_vmware.api [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5225048d-91d8-aaea-6ee7-0c46c490cf4d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.961679] env[62627]: DEBUG oslo_concurrency.lockutils [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.319s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1200.962239] env[62627]: DEBUG nova.compute.manager [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Start building networks asynchronously for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1200.965126] env[62627]: DEBUG oslo_concurrency.lockutils [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 1.369s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1201.008726] env[62627]: DEBUG nova.network.neutron [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1201.076201] env[62627]: DEBUG oslo_concurrency.lockutils [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Releasing lock "refresh_cache-d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1201.137374] env[62627]: DEBUG oslo_vmware.api [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Task: {'id': task-2195183, 'name': Rename_Task, 'duration_secs': 0.22885} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1201.140560] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] [instance: 722d7bce-f0d7-4891-aa48-0ead10786a6b] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1201.140834] env[62627]: DEBUG oslo_vmware.api [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195184, 'name': ReconfigVM_Task, 'duration_secs': 0.272853} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1201.141047] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b5bac95b-bb51-482e-a7fb-bff1ef8d25b4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.142521] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Reconfigured VM instance instance-00000056 to attach disk [datastore2] f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1/f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1201.143927] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'device_name': '/dev/sda', 'encrypted': False, 'encryption_secret_uuid': None, 'encryption_options': None, 'boot_index': 0, 'encryption_format': None, 'guest_format': None, 'disk_bus': None, 'device_type': 'disk', 'size': 0, 'image_id': 'd5497cf1-16e8-478a-a834-e9289b1751b7'}], 'ephemerals': [], 'block_device_mapping': [{'delete_on_termination': False, 'attachment_id': '98b7171b-4d56-4cd7-9f38-3fc0c53d7766', 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447803', 'volume_id': '4c7ed974-1052-4c7c-aece-d94f225c352b', 'name': 'volume-4c7ed974-1052-4c7c-aece-d94f225c352b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1', 'attached_at': '', 'detached_at': '', 'volume_id': '4c7ed974-1052-4c7c-aece-d94f225c352b', 'serial': '4c7ed974-1052-4c7c-aece-d94f225c352b'}, 'boot_index': None, 'mount_device': '/dev/sdb', 'disk_bus': None, 'device_type': None, 'guest_format': None, 'volume_type': None}], 'swap': None} {{(pid=62627) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1201.144143] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Volume attach. 
Driver type: vmdk {{(pid=62627) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1201.144339] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447803', 'volume_id': '4c7ed974-1052-4c7c-aece-d94f225c352b', 'name': 'volume-4c7ed974-1052-4c7c-aece-d94f225c352b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1', 'attached_at': '', 'detached_at': '', 'volume_id': '4c7ed974-1052-4c7c-aece-d94f225c352b', 'serial': '4c7ed974-1052-4c7c-aece-d94f225c352b'} {{(pid=62627) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1201.145077] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8efec4fc-ecd1-4c88-a906-240d45765ebb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.162332] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e2e7e83-b634-43a1-b3cd-02a3121cac8a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.164923] env[62627]: DEBUG oslo_vmware.api [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Waiting for the task: (returnval){ [ 1201.164923] env[62627]: value = "task-2195185" [ 1201.164923] env[62627]: _type = "Task" [ 1201.164923] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.165877] env[62627]: DEBUG nova.network.neutron [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Updating instance_info_cache with network_info: [{"id": "9a79e592-1487-4b96-93f2-66df8b0224a4", "address": "fa:16:3e:56:2b:23", "network": {"id": "da26ed97-a6a7-47f1-b6c0-d1b2f45ad202", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-265963183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97c11d791dcf4f94a8e8bda7a6f014cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6eaa481-1f92-4851-b98e-09ed0daad7cb", "external-id": "nsx-vlan-transportzone-636", "segmentation_id": 636, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a79e592-14", "ovs_interfaceid": "9a79e592-1487-4b96-93f2-66df8b0224a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1201.198329] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Reconfiguring VM instance instance-00000056 to attach disk [datastore2] volume-4c7ed974-1052-4c7c-aece-d94f225c352b/volume-4c7ed974-1052-4c7c-aece-d94f225c352b.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1201.199478] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2350ebf6-5290-4a9f-8c3e-93544bd7fd8c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.216193] env[62627]: DEBUG oslo_vmware.api [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Task: {'id': task-2195185, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.221757] env[62627]: DEBUG oslo_vmware.api [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1201.221757] env[62627]: value = "task-2195186" [ 1201.221757] env[62627]: _type = "Task" [ 1201.221757] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.230303] env[62627]: DEBUG oslo_vmware.api [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195186, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.368811] env[62627]: DEBUG oslo_vmware.api [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5225048d-91d8-aaea-6ee7-0c46c490cf4d, 'name': SearchDatastore_Task, 'duration_secs': 0.029684} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1201.369175] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1201.369534] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 30db4963-095b-4297-ad7c-0606451bd2b6/30db4963-095b-4297-ad7c-0606451bd2b6.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1201.369828] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4dd5f32e-44f7-46b7-a485-a258ef4cead9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.377609] env[62627]: DEBUG oslo_vmware.api [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Waiting for the task: (returnval){ [ 1201.377609] env[62627]: value = "task-2195187" [ 1201.377609] env[62627]: _type = "Task" [ 1201.377609] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.385453] env[62627]: DEBUG oslo_vmware.api [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195187, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.468735] env[62627]: DEBUG nova.compute.utils [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1201.470467] env[62627]: DEBUG nova.compute.manager [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Allocating IP information in the background. {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1201.470622] env[62627]: DEBUG nova.network.neutron [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1201.474747] env[62627]: INFO nova.compute.claims [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1201.518954] env[62627]: DEBUG nova.policy [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3c5e5eee141e496db2d2ea316c43e6be', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8e4f7ad3ebcf4c4d97bdbce58eec5a09', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 1201.609997] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e0cba5c-34f4-4d2e-8384-7185c534fb78 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.631408] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4ba58f8-6232-41de-826c-22e459b51614 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.639244] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Updating instance 'd2d4dd6b-d5c5-402b-b165-4c9ae1e36b93' progress to 83 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1201.682090] env[62627]: DEBUG oslo_vmware.api [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Task: {'id': task-2195185, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.690471] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Releasing lock "refresh_cache-b68042fd-4955-41e4-b14f-1f8268ace377" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1201.690787] env[62627]: DEBUG nova.compute.manager [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Instance network_info: |[{"id": "9a79e592-1487-4b96-93f2-66df8b0224a4", "address": "fa:16:3e:56:2b:23", "network": {"id": "da26ed97-a6a7-47f1-b6c0-d1b2f45ad202", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-265963183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97c11d791dcf4f94a8e8bda7a6f014cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6eaa481-1f92-4851-b98e-09ed0daad7cb", "external-id": "nsx-vlan-transportzone-636", "segmentation_id": 636, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a79e592-14", "ovs_interfaceid": "9a79e592-1487-4b96-93f2-66df8b0224a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1201.691541] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:56:2b:23', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c6eaa481-1f92-4851-b98e-09ed0daad7cb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9a79e592-1487-4b96-93f2-66df8b0224a4', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1201.700717] env[62627]: DEBUG oslo.service.loopingcall [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1201.701354] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1201.701857] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b64830c7-8dbe-4172-be24-8f775c4b39bd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.728018] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1201.728018] env[62627]: value = "task-2195188" [ 1201.728018] env[62627]: _type = "Task" [ 1201.728018] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.737436] env[62627]: DEBUG oslo_vmware.api [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195186, 'name': ReconfigVM_Task, 'duration_secs': 0.302635} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1201.738421] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Reconfigured VM instance instance-00000056 to attach disk [datastore2] volume-4c7ed974-1052-4c7c-aece-d94f225c352b/volume-4c7ed974-1052-4c7c-aece-d94f225c352b.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1201.748383] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5ee8f05e-990c-423f-bf85-7a4d10115dcb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.761963] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195188, 'name': CreateVM_Task} progress is 6%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.770683] env[62627]: DEBUG oslo_vmware.api [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1201.770683] env[62627]: value = "task-2195189" [ 1201.770683] env[62627]: _type = "Task" [ 1201.770683] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.784031] env[62627]: DEBUG oslo_vmware.api [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195189, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.890086] env[62627]: DEBUG oslo_vmware.api [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195187, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.980163] env[62627]: DEBUG nova.compute.manager [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1201.984275] env[62627]: INFO nova.compute.resource_tracker [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Updating resource usage from migration 1c5f3704-de8e-4996-98a8-086c50a3c0d7 [ 1202.056286] env[62627]: DEBUG nova.network.neutron [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Successfully created port: 3166f86b-c852-4335-9c18-b4c68f51d8ab {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1202.146939] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-968fc0cc-5479-4e6f-8099-3f4b19cdbb54 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Updating instance 'd2d4dd6b-d5c5-402b-b165-4c9ae1e36b93' progress to 100 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1202.177237] env[62627]: DEBUG oslo_vmware.api [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Task: {'id': task-2195185, 'name': PowerOnVM_Task, 'duration_secs': 0.764342} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1202.177640] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] [instance: 722d7bce-f0d7-4891-aa48-0ead10786a6b] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1202.177890] env[62627]: INFO nova.compute.manager [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] [instance: 722d7bce-f0d7-4891-aa48-0ead10786a6b] Took 8.59 seconds to spawn the instance on the hypervisor. 
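The ReconfigVM_Task, Rename_Task, PowerOnVM_Task and CopyVirtualDisk_Task records above all follow the same oslo.vmware pattern: a SOAP task method is invoked ("Invoking VirtualMachine.ReconfigVM_Task ..."), wait_for_task logs "Waiting for the task ... to complete", _poll_task reports "progress is N%", and the task finally appears as "completed successfully". A minimal sketch of that pattern using oslo.vmware's public VMwareAPISession API; the vCenter host, credentials, vm_ref and config_spec below are placeholders, not values taken from this run:

    from oslo_vmware import api as vmware_api


    def reconfigure_and_wait(session, vm_ref, config_spec):
        # invoke_api() issues the SOAP call (the "Invoking
        # VirtualMachine.ReconfigVM_Task" lines) and returns a task reference.
        task = session.invoke_api(session.vim, 'ReconfigVM_Task',
                                  vm_ref, spec=config_spec)
        # wait_for_task() polls the task (the "progress is N%" lines) and
        # returns the task info on success, raising if the task errors out.
        return session.wait_for_task(task)


    # Placeholder connection details, passed positionally: host, user,
    # password, API retry count, task poll interval in seconds. Constructing
    # the session authenticates against the (placeholder) vCenter.
    session = vmware_api.VMwareAPISession(
        'vcenter.example.org', 'user', 'secret', 10, 0.5)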
[ 1202.178093] env[62627]: DEBUG nova.compute.manager [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] [instance: 722d7bce-f0d7-4891-aa48-0ead10786a6b] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1202.178928] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-528d6781-38c2-49b5-b659-a972afd1b347 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.234477] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15083713-0bc1-4468-84dc-050080333d44 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.244662] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adb28d1b-29c4-438d-9d90-747916580a1e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.247929] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195188, 'name': CreateVM_Task, 'duration_secs': 0.480668} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1202.248109] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1202.249115] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1202.249285] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1202.249597] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1202.249830] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c5ad8abc-e81c-4544-8d95-b81fcddab975 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.279240] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d680b95-3ac6-4c01-84d2-bc35484e98e0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.282973] env[62627]: DEBUG oslo_vmware.api [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 
tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 1202.282973] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52145dd2-e479-3287-1734-d733cb81ab29" [ 1202.282973] env[62627]: _type = "Task" [ 1202.282973] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1202.291972] env[62627]: DEBUG oslo_vmware.api [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195189, 'name': ReconfigVM_Task, 'duration_secs': 0.285509} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1202.291972] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447803', 'volume_id': '4c7ed974-1052-4c7c-aece-d94f225c352b', 'name': 'volume-4c7ed974-1052-4c7c-aece-d94f225c352b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1', 'attached_at': '', 'detached_at': '', 'volume_id': '4c7ed974-1052-4c7c-aece-d94f225c352b', 'serial': '4c7ed974-1052-4c7c-aece-d94f225c352b'} {{(pid=62627) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1202.293169] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94e2d815-c1a8-412a-be7c-570809e55b9e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.301238] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b71899c5-45da-4a24-b6c3-e927006b3041 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.302705] env[62627]: DEBUG oslo_vmware.api [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52145dd2-e479-3287-1734-d733cb81ab29, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.312451] env[62627]: DEBUG nova.compute.provider_tree [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1202.314665] env[62627]: DEBUG oslo_vmware.api [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1202.314665] env[62627]: value = "task-2195190" [ 1202.314665] env[62627]: _type = "Task" [ 1202.314665] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1202.323444] env[62627]: DEBUG oslo_vmware.api [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195190, 'name': Rename_Task} progress is 10%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.387860] env[62627]: DEBUG oslo_vmware.api [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195187, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.643763} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1202.388144] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 30db4963-095b-4297-ad7c-0606451bd2b6/30db4963-095b-4297-ad7c-0606451bd2b6.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1202.388361] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 30db4963-095b-4297-ad7c-0606451bd2b6] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1202.388613] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3e88943b-6ff9-42c6-a6d2-2a58b66d8776 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.395765] env[62627]: DEBUG oslo_vmware.api [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Waiting for the task: (returnval){ [ 1202.395765] env[62627]: value = "task-2195191" [ 1202.395765] env[62627]: _type = "Task" [ 1202.395765] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1202.403404] env[62627]: DEBUG oslo_vmware.api [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195191, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.696975] env[62627]: INFO nova.compute.manager [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] [instance: 722d7bce-f0d7-4891-aa48-0ead10786a6b] Took 14.89 seconds to build instance. 
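The SearchDatastore_Task and CopyVirtualDisk_Task records above run under a lock named after the cached image VMDK ("Acquiring lock \"[datastore2] devstack-image-cache_base/...\"" / "Releasing lock ..."), so only one build at a time copies or inspects that cache entry. A minimal sketch of that locking pattern, assuming oslo.concurrency's lockutils (the module these "Acquiring/Releasing lock" lines come from); the copy step itself is a placeholder, not a Nova function:

    from oslo_concurrency import lockutils

    # The lock name mirrors the log: the cached image VMDK path on datastore2.
    CACHED_VMDK = ('[datastore2] devstack-image-cache_base/'
                   'd5497cf1-16e8-478a-a834-e9289b1751b7/'
                   'd5497cf1-16e8-478a-a834-e9289b1751b7.vmdk')


    def copy_cached_image_to_instance_dir(path):
        # Placeholder for the SearchDatastore_Task / CopyVirtualDisk_Task
        # work done under the lock in the log; not a Nova function.
        print('would copy %s into the instance directory' % path)


    # lockutils.lock() is the context manager behind the "Acquiring lock" /
    # "Releasing lock" lines; it serializes access to the shared cache entry
    # across concurrent builds in this process.
    with lockutils.lock(CACHED_VMDK):
        copy_cached_image_to_instance_dir(CACHED_VMDK)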
[ 1202.795197] env[62627]: DEBUG oslo_vmware.api [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52145dd2-e479-3287-1734-d733cb81ab29, 'name': SearchDatastore_Task, 'duration_secs': 0.016811} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1202.795524] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1202.795807] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1202.796075] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1202.796243] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1202.796436] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1202.796704] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0f77b09d-48b1-4b5b-b16b-c9be724a3fce {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.811806] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1202.812077] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1202.812861] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6c0facc0-b24d-408d-a3c9-7a101cab4ace {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.815930] env[62627]: DEBUG nova.scheduler.client.report [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1202.825984] env[62627]: DEBUG oslo_vmware.api [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 1202.825984] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]527b1c0d-2d28-9400-dd94-6a2f64fcbd25" [ 1202.825984] env[62627]: _type = "Task" [ 1202.825984] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1202.829374] env[62627]: DEBUG oslo_vmware.api [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195190, 'name': Rename_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.838732] env[62627]: DEBUG oslo_vmware.api [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]527b1c0d-2d28-9400-dd94-6a2f64fcbd25, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.905597] env[62627]: DEBUG oslo_vmware.api [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195191, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.994898] env[62627]: DEBUG nova.compute.manager [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Start spawning the instance on the hypervisor. 
{{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1203.025305] env[62627]: DEBUG nova.virt.hardware [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1203.025617] env[62627]: DEBUG nova.virt.hardware [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1203.025787] env[62627]: DEBUG nova.virt.hardware [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1203.025970] env[62627]: DEBUG nova.virt.hardware [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1203.026139] env[62627]: DEBUG nova.virt.hardware [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1203.026292] env[62627]: DEBUG nova.virt.hardware [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1203.026499] env[62627]: DEBUG nova.virt.hardware [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1203.026659] env[62627]: DEBUG nova.virt.hardware [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1203.026824] env[62627]: DEBUG 
nova.virt.hardware [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1203.026988] env[62627]: DEBUG nova.virt.hardware [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1203.027233] env[62627]: DEBUG nova.virt.hardware [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1203.028221] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19c9df7c-02ed-4f5c-9abd-09f9902b1b11 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.036311] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43d48a70-3274-4067-8e32-5f9a68fbf1ed {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.199894] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ab161b8d-12ec-4c22-a16b-11c16ec186ad tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Lock "722d7bce-f0d7-4891-aa48-0ead10786a6b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.397s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1203.326178] env[62627]: DEBUG oslo_concurrency.lockutils [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.361s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1203.326380] env[62627]: INFO nova.compute.manager [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Migrating [ 1203.332529] env[62627]: DEBUG oslo_concurrency.lockutils [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.153s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1203.332724] env[62627]: DEBUG nova.objects.instance [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Lazy-loading 'pci_requests' on Instance uuid b282093d-8013-436d-94d2-bc26161456ea {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1203.350729] 
env[62627]: DEBUG oslo_vmware.api [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195190, 'name': Rename_Task, 'duration_secs': 0.860932} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1203.351683] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1203.351982] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e4de9cf9-47d0-4cb6-8fa5-6e1b49b62ad0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.358989] env[62627]: DEBUG oslo_vmware.api [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]527b1c0d-2d28-9400-dd94-6a2f64fcbd25, 'name': SearchDatastore_Task, 'duration_secs': 0.02214} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1203.360386] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-19219253-76ab-4685-8526-658b524b71cd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.365233] env[62627]: DEBUG oslo_vmware.api [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1203.365233] env[62627]: value = "task-2195192" [ 1203.365233] env[62627]: _type = "Task" [ 1203.365233] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1203.370255] env[62627]: DEBUG oslo_vmware.api [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 1203.370255] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5216a677-6e31-c5fc-32e0-894637cb2d7b" [ 1203.370255] env[62627]: _type = "Task" [ 1203.370255] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1203.377101] env[62627]: DEBUG oslo_vmware.api [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195192, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.382248] env[62627]: DEBUG oslo_vmware.api [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5216a677-6e31-c5fc-32e0-894637cb2d7b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.407755] env[62627]: DEBUG oslo_vmware.api [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195191, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.711312} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1203.408732] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 30db4963-095b-4297-ad7c-0606451bd2b6] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1203.411111] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-191caad2-5192-4d52-a68f-8b9c46556ed3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.436060] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 30db4963-095b-4297-ad7c-0606451bd2b6] Reconfiguring VM instance instance-00000062 to attach disk [datastore2] 30db4963-095b-4297-ad7c-0606451bd2b6/30db4963-095b-4297-ad7c-0606451bd2b6.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1203.436746] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9d3ad66d-8a5e-4092-8f86-faa78c1e9384 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.459262] env[62627]: DEBUG oslo_vmware.api [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Waiting for the task: (returnval){ [ 1203.459262] env[62627]: value = "task-2195193" [ 1203.459262] env[62627]: _type = "Task" [ 1203.459262] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1203.468409] env[62627]: DEBUG oslo_vmware.api [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195193, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.647859] env[62627]: DEBUG nova.compute.manager [req-6c4a7aa7-89f5-4856-9383-a63987905e6f req-edac9da8-65fe-4e8c-a4c3-13884143d761 service nova] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Received event network-vif-plugged-3166f86b-c852-4335-9c18-b4c68f51d8ab {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1203.648207] env[62627]: DEBUG oslo_concurrency.lockutils [req-6c4a7aa7-89f5-4856-9383-a63987905e6f req-edac9da8-65fe-4e8c-a4c3-13884143d761 service nova] Acquiring lock "1109aae0-62d2-4c55-8c68-f224f3de8b55-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1203.648579] env[62627]: DEBUG oslo_concurrency.lockutils [req-6c4a7aa7-89f5-4856-9383-a63987905e6f req-edac9da8-65fe-4e8c-a4c3-13884143d761 service nova] Lock "1109aae0-62d2-4c55-8c68-f224f3de8b55-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1203.648915] env[62627]: DEBUG oslo_concurrency.lockutils [req-6c4a7aa7-89f5-4856-9383-a63987905e6f req-edac9da8-65fe-4e8c-a4c3-13884143d761 service nova] Lock "1109aae0-62d2-4c55-8c68-f224f3de8b55-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1203.649213] env[62627]: DEBUG nova.compute.manager [req-6c4a7aa7-89f5-4856-9383-a63987905e6f req-edac9da8-65fe-4e8c-a4c3-13884143d761 service nova] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] No waiting events found dispatching network-vif-plugged-3166f86b-c852-4335-9c18-b4c68f51d8ab {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1203.649527] env[62627]: WARNING nova.compute.manager [req-6c4a7aa7-89f5-4856-9383-a63987905e6f req-edac9da8-65fe-4e8c-a4c3-13884143d761 service nova] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Received unexpected event network-vif-plugged-3166f86b-c852-4335-9c18-b4c68f51d8ab for instance with vm_state building and task_state spawning. 
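The lock messages in the entry above show the oslo.concurrency named-lock pattern nova-compute uses to serialize per-instance event handling: acquire the "<uuid>-events" lock, pop any waiting event, release it, and warn if no waiter was registered. A minimal sketch of that pattern, assuming only the public oslo_concurrency.lockutils API; the lock name format and the handler body below are illustrative placeholders, not code from this deployment:

# Sketch of the named-lock pattern behind the "<uuid>-events" acquire/release
# pairs logged above. The handler body is a placeholder.
from oslo_concurrency import lockutils

def pop_instance_event(instance_uuid, event_name):
    # Serialize access to this instance's pending-event registry, as the
    # "...-events" lock acquire/release in the log does.
    with lockutils.lock('%s-events' % instance_uuid):
        # ... look up a waiter registered for event_name; if none exists the
        # caller logs "No waiting events found dispatching ..." as above ...
        return None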
[ 1203.741503] env[62627]: DEBUG nova.network.neutron [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Successfully updated port: 3166f86b-c852-4335-9c18-b4c68f51d8ab {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1203.838832] env[62627]: DEBUG nova.objects.instance [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Lazy-loading 'numa_topology' on Instance uuid b282093d-8013-436d-94d2-bc26161456ea {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1203.852667] env[62627]: DEBUG oslo_concurrency.lockutils [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquiring lock "refresh_cache-36a8d4d3-d6b6-4b42-9514-0e29ee45aea9" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1203.852874] env[62627]: DEBUG oslo_concurrency.lockutils [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquired lock "refresh_cache-36a8d4d3-d6b6-4b42-9514-0e29ee45aea9" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1203.853141] env[62627]: DEBUG nova.network.neutron [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1203.880737] env[62627]: DEBUG oslo_vmware.api [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195192, 'name': PowerOnVM_Task, 'duration_secs': 0.470092} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1203.881342] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1203.881535] env[62627]: DEBUG nova.compute.manager [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1203.882307] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7842e32d-5746-4aaa-86e6-2b9d8784cd4f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.888330] env[62627]: DEBUG oslo_vmware.api [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5216a677-6e31-c5fc-32e0-894637cb2d7b, 'name': SearchDatastore_Task, 'duration_secs': 0.034871} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1203.888897] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1203.889172] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] b68042fd-4955-41e4-b14f-1f8268ace377/b68042fd-4955-41e4-b14f-1f8268ace377.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1203.889418] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-607f73b7-5011-4b92-92eb-e653d73bcac3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.900572] env[62627]: DEBUG oslo_vmware.api [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 1203.900572] env[62627]: value = "task-2195194" [ 1203.900572] env[62627]: _type = "Task" [ 1203.900572] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1203.909916] env[62627]: DEBUG oslo_vmware.api [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195194, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.970079] env[62627]: DEBUG oslo_vmware.api [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195193, 'name': ReconfigVM_Task, 'duration_secs': 0.2963} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1203.970439] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 30db4963-095b-4297-ad7c-0606451bd2b6] Reconfigured VM instance instance-00000062 to attach disk [datastore2] 30db4963-095b-4297-ad7c-0606451bd2b6/30db4963-095b-4297-ad7c-0606451bd2b6.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1203.971119] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5951c03d-852d-467e-8db9-e3f48eee84be {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.979802] env[62627]: DEBUG oslo_vmware.api [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Waiting for the task: (returnval){ [ 1203.979802] env[62627]: value = "task-2195195" [ 1203.979802] env[62627]: _type = "Task" [ 1203.979802] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1203.990896] env[62627]: DEBUG oslo_vmware.api [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195195, 'name': Rename_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.246035] env[62627]: DEBUG oslo_concurrency.lockutils [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquiring lock "refresh_cache-1109aae0-62d2-4c55-8c68-f224f3de8b55" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1204.246429] env[62627]: DEBUG oslo_concurrency.lockutils [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquired lock "refresh_cache-1109aae0-62d2-4c55-8c68-f224f3de8b55" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1204.246429] env[62627]: DEBUG nova.network.neutron [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1204.341347] env[62627]: INFO nova.compute.claims [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1204.405305] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1204.411823] env[62627]: DEBUG oslo_vmware.api [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195194, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.421818] env[62627]: DEBUG oslo_concurrency.lockutils [None req-47083594-eb12-4bb4-a09f-9c04167581bb tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Acquiring lock "722d7bce-f0d7-4891-aa48-0ead10786a6b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1204.422718] env[62627]: DEBUG oslo_concurrency.lockutils [None req-47083594-eb12-4bb4-a09f-9c04167581bb tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Lock "722d7bce-f0d7-4891-aa48-0ead10786a6b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1204.422978] env[62627]: DEBUG oslo_concurrency.lockutils [None req-47083594-eb12-4bb4-a09f-9c04167581bb tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Acquiring lock "722d7bce-f0d7-4891-aa48-0ead10786a6b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1204.423214] env[62627]: DEBUG oslo_concurrency.lockutils [None req-47083594-eb12-4bb4-a09f-9c04167581bb tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Lock "722d7bce-f0d7-4891-aa48-0ead10786a6b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1204.423395] env[62627]: DEBUG oslo_concurrency.lockutils [None req-47083594-eb12-4bb4-a09f-9c04167581bb tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Lock "722d7bce-f0d7-4891-aa48-0ead10786a6b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1204.425451] env[62627]: INFO nova.compute.manager [None req-47083594-eb12-4bb4-a09f-9c04167581bb tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] [instance: 722d7bce-f0d7-4891-aa48-0ead10786a6b] Terminating instance [ 1204.489767] env[62627]: DEBUG oslo_vmware.api [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195195, 'name': Rename_Task, 'duration_secs': 0.176945} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1204.490065] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 30db4963-095b-4297-ad7c-0606451bd2b6] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1204.490324] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0d70429f-783b-49a1-a5a0-456c920760be {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.496957] env[62627]: DEBUG oslo_vmware.api [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Waiting for the task: (returnval){ [ 1204.496957] env[62627]: value = "task-2195196" [ 1204.496957] env[62627]: _type = "Task" [ 1204.496957] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1204.507701] env[62627]: DEBUG oslo_vmware.api [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195196, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.516562] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6a7107c0-31d8-4861-b65d-f4bcfba86935 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquiring lock "d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1204.516897] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6a7107c0-31d8-4861-b65d-f4bcfba86935 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lock "d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1204.517129] env[62627]: DEBUG nova.compute.manager [None req-6a7107c0-31d8-4861-b65d-f4bcfba86935 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Going to confirm migration 5 {{(pid=62627) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1204.608404] env[62627]: DEBUG nova.network.neutron [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Updating instance_info_cache with network_info: [{"id": "d885a4e0-fbdd-41cf-9e49-1e16cd392ecf", "address": "fa:16:3e:c6:8b:aa", "network": {"id": "f5de376b-cb15-4145-90b2-664ca4fa638e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1451402594-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 
4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74aed70194cf444884768a5c89fe8613", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "098df9b7-d759-47f7-b756-334848cb423b", "external-id": "nsx-vlan-transportzone-765", "segmentation_id": 765, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd885a4e0-fb", "ovs_interfaceid": "d885a4e0-fbdd-41cf-9e49-1e16cd392ecf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1204.793063] env[62627]: DEBUG nova.network.neutron [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1204.912089] env[62627]: DEBUG oslo_vmware.api [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195194, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.515791} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1204.914457] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] b68042fd-4955-41e4-b14f-1f8268ace377/b68042fd-4955-41e4-b14f-1f8268ace377.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1204.914676] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1204.914937] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-06feb433-d6e5-4273-a617-a236976581c7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.925752] env[62627]: DEBUG oslo_vmware.api [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 1204.925752] env[62627]: value = "task-2195197" [ 1204.925752] env[62627]: _type = "Task" [ 1204.925752] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1204.931029] env[62627]: DEBUG nova.compute.manager [None req-47083594-eb12-4bb4-a09f-9c04167581bb tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] [instance: 722d7bce-f0d7-4891-aa48-0ead10786a6b] Start destroying the instance on the hypervisor. {{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1204.931250] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-47083594-eb12-4bb4-a09f-9c04167581bb tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] [instance: 722d7bce-f0d7-4891-aa48-0ead10786a6b] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1204.934751] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dee973e-5dd6-4b7d-9737-7290c9874582 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.937534] env[62627]: DEBUG oslo_vmware.api [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195197, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.942343] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-47083594-eb12-4bb4-a09f-9c04167581bb tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] [instance: 722d7bce-f0d7-4891-aa48-0ead10786a6b] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1204.942600] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b90b4342-1978-496a-9ba0-986f202d9075 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.948944] env[62627]: DEBUG oslo_vmware.api [None req-47083594-eb12-4bb4-a09f-9c04167581bb tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Waiting for the task: (returnval){ [ 1204.948944] env[62627]: value = "task-2195198" [ 1204.948944] env[62627]: _type = "Task" [ 1204.948944] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1204.953442] env[62627]: DEBUG nova.network.neutron [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Updating instance_info_cache with network_info: [{"id": "3166f86b-c852-4335-9c18-b4c68f51d8ab", "address": "fa:16:3e:7b:bd:70", "network": {"id": "a1c5c227-8f1f-4881-a6d8-f446b64d9846", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1052922661-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e4f7ad3ebcf4c4d97bdbce58eec5a09", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "023d6500-887e-4dc4-bec5-06b40450d9c0", "external-id": "nsx-vlan-transportzone-108", "segmentation_id": 108, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3166f86b-c8", "ovs_interfaceid": "3166f86b-c852-4335-9c18-b4c68f51d8ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1204.960936] env[62627]: DEBUG oslo_vmware.api [None req-47083594-eb12-4bb4-a09f-9c04167581bb tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Task: {'id': task-2195198, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.007440] env[62627]: DEBUG oslo_vmware.api [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195196, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.055192] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6a7107c0-31d8-4861-b65d-f4bcfba86935 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquiring lock "refresh_cache-d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1205.055380] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6a7107c0-31d8-4861-b65d-f4bcfba86935 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquired lock "refresh_cache-d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1205.055586] env[62627]: DEBUG nova.network.neutron [None req-6a7107c0-31d8-4861-b65d-f4bcfba86935 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1205.055826] env[62627]: DEBUG nova.objects.instance [None req-6a7107c0-31d8-4861-b65d-f4bcfba86935 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lazy-loading 'info_cache' on Instance uuid d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1205.111412] env[62627]: DEBUG oslo_concurrency.lockutils [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Releasing lock "refresh_cache-36a8d4d3-d6b6-4b42-9514-0e29ee45aea9" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1205.435145] env[62627]: DEBUG oslo_vmware.api [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195197, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076281} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1205.437428] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1205.438386] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-768cdf01-42ea-4814-bf44-540e866afb04 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.461415] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Reconfiguring VM instance instance-00000063 to attach disk [datastore2] b68042fd-4955-41e4-b14f-1f8268ace377/b68042fd-4955-41e4-b14f-1f8268ace377.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1205.467017] env[62627]: DEBUG oslo_concurrency.lockutils [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Releasing lock "refresh_cache-1109aae0-62d2-4c55-8c68-f224f3de8b55" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1205.467313] env[62627]: DEBUG nova.compute.manager [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Instance network_info: |[{"id": "3166f86b-c852-4335-9c18-b4c68f51d8ab", "address": "fa:16:3e:7b:bd:70", "network": {"id": "a1c5c227-8f1f-4881-a6d8-f446b64d9846", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1052922661-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e4f7ad3ebcf4c4d97bdbce58eec5a09", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "023d6500-887e-4dc4-bec5-06b40450d9c0", "external-id": "nsx-vlan-transportzone-108", "segmentation_id": 108, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3166f86b-c8", "ovs_interfaceid": "3166f86b-c852-4335-9c18-b4c68f51d8ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1205.467544] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9616f299-d156-41f6-99ce-5a3bc9248e45 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.481859] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None 
req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7b:bd:70', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '023d6500-887e-4dc4-bec5-06b40450d9c0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3166f86b-c852-4335-9c18-b4c68f51d8ab', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1205.488933] env[62627]: DEBUG oslo.service.loopingcall [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1205.489505] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1205.490038] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-31c68121-5050-4b76-80c1-df047694e2ce {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.510077] env[62627]: DEBUG oslo_vmware.api [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 1205.510077] env[62627]: value = "task-2195199" [ 1205.510077] env[62627]: _type = "Task" [ 1205.510077] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1205.510305] env[62627]: DEBUG oslo_vmware.api [None req-47083594-eb12-4bb4-a09f-9c04167581bb tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Task: {'id': task-2195198, 'name': PowerOffVM_Task, 'duration_secs': 0.241078} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1205.513770] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-47083594-eb12-4bb4-a09f-9c04167581bb tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] [instance: 722d7bce-f0d7-4891-aa48-0ead10786a6b] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1205.513984] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-47083594-eb12-4bb4-a09f-9c04167581bb tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] [instance: 722d7bce-f0d7-4891-aa48-0ead10786a6b] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1205.518794] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dbc17c3f-365e-4f46-8181-30e3738dd186 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.523396] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1205.523396] env[62627]: value = "task-2195200" [ 1205.523396] env[62627]: _type = "Task" [ 1205.523396] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1205.530101] env[62627]: DEBUG oslo_vmware.api [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195196, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.533405] env[62627]: DEBUG oslo_vmware.api [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195199, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.538504] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195200, 'name': CreateVM_Task} progress is 5%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.591442] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-47083594-eb12-4bb4-a09f-9c04167581bb tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] [instance: 722d7bce-f0d7-4891-aa48-0ead10786a6b] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1205.591761] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-47083594-eb12-4bb4-a09f-9c04167581bb tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] [instance: 722d7bce-f0d7-4891-aa48-0ead10786a6b] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1205.592080] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-47083594-eb12-4bb4-a09f-9c04167581bb tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Deleting the datastore file [datastore2] 722d7bce-f0d7-4891-aa48-0ead10786a6b {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1205.592478] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-96ee43d8-34cc-4a7b-9343-cc8b562674d3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.604537] env[62627]: DEBUG oslo_vmware.api [None req-47083594-eb12-4bb4-a09f-9c04167581bb tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Waiting for the task: (returnval){ [ 1205.604537] env[62627]: value = "task-2195202" [ 1205.604537] env[62627]: _type = "Task" [ 1205.604537] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1205.622514] env[62627]: DEBUG oslo_vmware.api [None req-47083594-eb12-4bb4-a09f-9c04167581bb tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Task: {'id': task-2195202, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.624559] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02accb90-29a6-4e4b-a200-178690df8fe7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.632661] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c860063-1b5f-410a-bcc0-9827320d7539 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.667074] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b579941-3ff9-4ecf-8577-394cb85dd080 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.678186] env[62627]: DEBUG nova.compute.manager [req-ce910e2f-d5b9-4ea3-b50f-ab8a9f55d3be req-7a8eb1dd-93e2-4ddb-82e1-7140618a7f99 service nova] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Received event network-changed-3166f86b-c852-4335-9c18-b4c68f51d8ab {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1205.678186] env[62627]: DEBUG nova.compute.manager [req-ce910e2f-d5b9-4ea3-b50f-ab8a9f55d3be req-7a8eb1dd-93e2-4ddb-82e1-7140618a7f99 service nova] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Refreshing instance network info cache due to event network-changed-3166f86b-c852-4335-9c18-b4c68f51d8ab. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1205.678442] env[62627]: DEBUG oslo_concurrency.lockutils [req-ce910e2f-d5b9-4ea3-b50f-ab8a9f55d3be req-7a8eb1dd-93e2-4ddb-82e1-7140618a7f99 service nova] Acquiring lock "refresh_cache-1109aae0-62d2-4c55-8c68-f224f3de8b55" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1205.678615] env[62627]: DEBUG oslo_concurrency.lockutils [req-ce910e2f-d5b9-4ea3-b50f-ab8a9f55d3be req-7a8eb1dd-93e2-4ddb-82e1-7140618a7f99 service nova] Acquired lock "refresh_cache-1109aae0-62d2-4c55-8c68-f224f3de8b55" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1205.678812] env[62627]: DEBUG nova.network.neutron [req-ce910e2f-d5b9-4ea3-b50f-ab8a9f55d3be req-7a8eb1dd-93e2-4ddb-82e1-7140618a7f99 service nova] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Refreshing network info cache for port 3166f86b-c852-4335-9c18-b4c68f51d8ab {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1205.687826] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e36b4a6e-f956-4280-8e59-5cb705b249d3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.707059] env[62627]: DEBUG nova.compute.provider_tree [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1206.025325] env[62627]: DEBUG oslo_vmware.api [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': 
task-2195196, 'name': PowerOnVM_Task, 'duration_secs': 1.286658} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1206.031166] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 30db4963-095b-4297-ad7c-0606451bd2b6] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1206.031434] env[62627]: INFO nova.compute.manager [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 30db4963-095b-4297-ad7c-0606451bd2b6] Took 10.07 seconds to spawn the instance on the hypervisor. [ 1206.031684] env[62627]: DEBUG nova.compute.manager [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 30db4963-095b-4297-ad7c-0606451bd2b6] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1206.032015] env[62627]: DEBUG oslo_vmware.api [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195199, 'name': ReconfigVM_Task, 'duration_secs': 0.459258} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1206.032734] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbf97d77-bb1c-4625-bedc-b6fbd97d2acf {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.034939] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Reconfigured VM instance instance-00000063 to attach disk [datastore2] b68042fd-4955-41e4-b14f-1f8268ace377/b68042fd-4955-41e4-b14f-1f8268ace377.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1206.035808] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3a008012-6947-42ad-9901-2846a04dce6f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.040704] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195200, 'name': CreateVM_Task, 'duration_secs': 0.414181} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1206.042085] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1206.046274] env[62627]: DEBUG oslo_concurrency.lockutils [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1206.046437] env[62627]: DEBUG oslo_concurrency.lockutils [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1206.046751] env[62627]: DEBUG oslo_concurrency.lockutils [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1206.047054] env[62627]: DEBUG oslo_vmware.api [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 1206.047054] env[62627]: value = "task-2195203" [ 1206.047054] env[62627]: _type = "Task" [ 1206.047054] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1206.047440] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3a3dafa5-0a76-4e07-90b3-fbffeac6bee7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.055412] env[62627]: DEBUG oslo_vmware.api [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Waiting for the task: (returnval){ [ 1206.055412] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52840b01-cb37-16f7-7b1b-0b06f5accf4f" [ 1206.055412] env[62627]: _type = "Task" [ 1206.055412] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1206.058641] env[62627]: DEBUG oslo_vmware.api [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195203, 'name': Rename_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.067738] env[62627]: DEBUG oslo_vmware.api [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52840b01-cb37-16f7-7b1b-0b06f5accf4f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.115787] env[62627]: DEBUG oslo_vmware.api [None req-47083594-eb12-4bb4-a09f-9c04167581bb tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Task: {'id': task-2195202, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.166527} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1206.115787] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-47083594-eb12-4bb4-a09f-9c04167581bb tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1206.115787] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-47083594-eb12-4bb4-a09f-9c04167581bb tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] [instance: 722d7bce-f0d7-4891-aa48-0ead10786a6b] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1206.115787] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-47083594-eb12-4bb4-a09f-9c04167581bb tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] [instance: 722d7bce-f0d7-4891-aa48-0ead10786a6b] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1206.115787] env[62627]: INFO nova.compute.manager [None req-47083594-eb12-4bb4-a09f-9c04167581bb tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] [instance: 722d7bce-f0d7-4891-aa48-0ead10786a6b] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1206.116073] env[62627]: DEBUG oslo.service.loopingcall [None req-47083594-eb12-4bb4-a09f-9c04167581bb tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1206.116208] env[62627]: DEBUG nova.compute.manager [-] [instance: 722d7bce-f0d7-4891-aa48-0ead10786a6b] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1206.116300] env[62627]: DEBUG nova.network.neutron [-] [instance: 722d7bce-f0d7-4891-aa48-0ead10786a6b] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1206.210826] env[62627]: DEBUG nova.scheduler.client.report [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1206.291738] env[62627]: DEBUG nova.network.neutron [None req-6a7107c0-31d8-4861-b65d-f4bcfba86935 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Updating instance_info_cache with network_info: [{"id": "332cfd7e-9934-429a-bc74-275d48f07fdb", "address": "fa:16:3e:68:4c:72", "network": {"id": "e5d8177b-3afb-46cb-88d9-d4777ffa4a39", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1184692155-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.160", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f3a36c557145e38c528a628b1f3b67", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a44713-0af1-486e-bc0d-00e03a769fa4", "external-id": "nsx-vlan-transportzone-420", "segmentation_id": 420, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap332cfd7e-99", "ovs_interfaceid": "332cfd7e-9934-429a-bc74-275d48f07fdb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1206.560635] env[62627]: INFO nova.compute.manager [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 30db4963-095b-4297-ad7c-0606451bd2b6] Took 18.34 seconds to build instance. 
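The entries above trace the driver's build path for an instance: copy the cached image VMDK out of devstack-image-cache_base, extend the root virtual disk, reconfigure the VM to attach the disk, rename, and power on, with each step driven by a vCenter task that is polled until it reports completion. A minimal sketch of the oslo.vmware session and task-polling calls that the "Waiting for the task ... progress is N%" lines correspond to; the host, credentials, and managed-object id are placeholders, not values from this log:

# Sketch of the oslo.vmware task-polling pattern; connection details and the
# VM managed-object id are placeholders.
from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

session = vmware_api.VMwareAPISession(
    'vcenter.example.com', 'svc-user', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

vm_ref = vim_util.get_moref('vm-123', 'VirtualMachine')
# Start a vCenter task (PowerOnVM_Task here) and block until it completes;
# wait_for_task polls the task object and raises if it ends in an error state.
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
session.wait_for_task(task)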
[ 1206.578943] env[62627]: DEBUG oslo_vmware.api [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52840b01-cb37-16f7-7b1b-0b06f5accf4f, 'name': SearchDatastore_Task, 'duration_secs': 0.023287} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1206.578943] env[62627]: DEBUG oslo_vmware.api [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195203, 'name': Rename_Task, 'duration_secs': 0.351357} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1206.578943] env[62627]: DEBUG oslo_concurrency.lockutils [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1206.578943] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1206.580478] env[62627]: DEBUG oslo_concurrency.lockutils [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1206.580478] env[62627]: DEBUG oslo_concurrency.lockutils [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1206.580478] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1206.580478] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1206.580478] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7d8aa61c-f7c6-4e8d-aa81-25bdbf2213e9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1206.582665] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d82715e1-6c0e-42d8-b6d5-ac3ceab037f6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.588835] env[62627]: DEBUG nova.network.neutron [req-ce910e2f-d5b9-4ea3-b50f-ab8a9f55d3be req-7a8eb1dd-93e2-4ddb-82e1-7140618a7f99 service nova] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Updated VIF entry in instance network info cache for port 3166f86b-c852-4335-9c18-b4c68f51d8ab. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1206.589336] env[62627]: DEBUG nova.network.neutron [req-ce910e2f-d5b9-4ea3-b50f-ab8a9f55d3be req-7a8eb1dd-93e2-4ddb-82e1-7140618a7f99 service nova] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Updating instance_info_cache with network_info: [{"id": "3166f86b-c852-4335-9c18-b4c68f51d8ab", "address": "fa:16:3e:7b:bd:70", "network": {"id": "a1c5c227-8f1f-4881-a6d8-f446b64d9846", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1052922661-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e4f7ad3ebcf4c4d97bdbce58eec5a09", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "023d6500-887e-4dc4-bec5-06b40450d9c0", "external-id": "nsx-vlan-transportzone-108", "segmentation_id": 108, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3166f86b-c8", "ovs_interfaceid": "3166f86b-c852-4335-9c18-b4c68f51d8ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1206.593995] env[62627]: DEBUG nova.compute.manager [req-09f633b7-1dfc-42a2-83d5-b82336b77320 req-3c1374b7-3ba9-4b55-87be-0c8376503d89 service nova] [instance: 722d7bce-f0d7-4891-aa48-0ead10786a6b] Received event network-vif-deleted-f1d904fd-a7a5-45af-a796-47dcf5bf2e4e {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1206.594321] env[62627]: INFO nova.compute.manager [req-09f633b7-1dfc-42a2-83d5-b82336b77320 req-3c1374b7-3ba9-4b55-87be-0c8376503d89 service nova] [instance: 722d7bce-f0d7-4891-aa48-0ead10786a6b] Neutron deleted interface f1d904fd-a7a5-45af-a796-47dcf5bf2e4e; detaching it from the instance and deleting it from the info cache [ 1206.594610] env[62627]: DEBUG nova.network.neutron [req-09f633b7-1dfc-42a2-83d5-b82336b77320 req-3c1374b7-3ba9-4b55-87be-0c8376503d89 service nova] [instance: 722d7bce-f0d7-4891-aa48-0ead10786a6b] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1206.599504] env[62627]: DEBUG oslo_vmware.api [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 1206.599504] env[62627]: value = "task-2195204" [ 1206.599504] env[62627]: _type = "Task" [ 1206.599504] env[62627]: 
} to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1206.599857] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1206.600157] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1206.602174] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7fadaf1b-7457-44f5-ba1a-ae45912199b3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.617107] env[62627]: DEBUG oslo_vmware.api [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195204, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.618199] env[62627]: DEBUG oslo_vmware.api [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Waiting for the task: (returnval){ [ 1206.618199] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52e8fcba-139c-f89c-2e1a-e1e060a5e0a4" [ 1206.618199] env[62627]: _type = "Task" [ 1206.618199] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1206.629329] env[62627]: DEBUG oslo_vmware.api [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52e8fcba-139c-f89c-2e1a-e1e060a5e0a4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.630804] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-085afd1d-7859-42c6-ab85-21e977820129 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.651192] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Updating instance '36a8d4d3-d6b6-4b42-9514-0e29ee45aea9' progress to 0 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1206.715887] env[62627]: DEBUG oslo_concurrency.lockutils [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.383s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1206.718148] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7061ffc6-121c-40f8-b94f-c1227b20e94d tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.992s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1206.718385] env[62627]: DEBUG nova.objects.instance [None req-7061ffc6-121c-40f8-b94f-c1227b20e94d tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Lazy-loading 'resources' on Instance uuid 367a83b9-79f8-43b2-91dd-40d13193fa40 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1206.768035] env[62627]: INFO nova.network.neutron [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Updating port 823150e9-f3ee-44ab-a36b-c8abdca8b70e with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1206.794051] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6a7107c0-31d8-4861-b65d-f4bcfba86935 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Releasing lock "refresh_cache-d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1206.794314] env[62627]: DEBUG nova.objects.instance [None req-6a7107c0-31d8-4861-b65d-f4bcfba86935 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lazy-loading 'migration_context' on Instance uuid d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1207.062787] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ad0958e4-ed7d-42c1-9da9-f9ee8daec69d tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lock "30db4963-095b-4297-ad7c-0606451bd2b6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 
19.856s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1207.078784] env[62627]: DEBUG nova.network.neutron [-] [instance: 722d7bce-f0d7-4891-aa48-0ead10786a6b] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1207.091663] env[62627]: DEBUG oslo_concurrency.lockutils [req-ce910e2f-d5b9-4ea3-b50f-ab8a9f55d3be req-7a8eb1dd-93e2-4ddb-82e1-7140618a7f99 service nova] Releasing lock "refresh_cache-1109aae0-62d2-4c55-8c68-f224f3de8b55" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1207.102211] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2a044974-c027-4a7e-ad44-c43d05616c6e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.117473] env[62627]: DEBUG oslo_vmware.api [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195204, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.117809] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d73c321f-de95-4f8e-8678-abc74e53dc57 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.144855] env[62627]: DEBUG oslo_vmware.api [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52e8fcba-139c-f89c-2e1a-e1e060a5e0a4, 'name': SearchDatastore_Task, 'duration_secs': 0.012367} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1207.144855] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c6836858-7758-4246-b3ce-9a0aab966be0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.163822] env[62627]: DEBUG oslo_vmware.api [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Waiting for the task: (returnval){ [ 1207.163822] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]526bcc40-3ded-89e0-c7f4-a435196be5a1" [ 1207.163822] env[62627]: _type = "Task" [ 1207.163822] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1207.168022] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1207.168022] env[62627]: DEBUG nova.compute.manager [req-09f633b7-1dfc-42a2-83d5-b82336b77320 req-3c1374b7-3ba9-4b55-87be-0c8376503d89 service nova] [instance: 722d7bce-f0d7-4891-aa48-0ead10786a6b] Detach interface failed, port_id=f1d904fd-a7a5-45af-a796-47dcf5bf2e4e, reason: Instance 722d7bce-f0d7-4891-aa48-0ead10786a6b could not be found. {{(pid=62627) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1207.168022] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-61cf8f96-23da-4fa3-9065-a75db360c3d5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.179124] env[62627]: DEBUG oslo_vmware.api [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]526bcc40-3ded-89e0-c7f4-a435196be5a1, 'name': SearchDatastore_Task, 'duration_secs': 0.010934} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1207.180435] env[62627]: DEBUG oslo_concurrency.lockutils [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1207.180703] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 1109aae0-62d2-4c55-8c68-f224f3de8b55/1109aae0-62d2-4c55-8c68-f224f3de8b55.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1207.181022] env[62627]: DEBUG oslo_vmware.api [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1207.181022] env[62627]: value = "task-2195205" [ 1207.181022] env[62627]: _type = "Task" [ 1207.181022] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1207.181247] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-905e2d28-52f3-4a4c-b4f6-dce770f26362 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.192479] env[62627]: DEBUG oslo_vmware.api [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195205, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.194143] env[62627]: DEBUG oslo_vmware.api [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Waiting for the task: (returnval){ [ 1207.194143] env[62627]: value = "task-2195206" [ 1207.194143] env[62627]: _type = "Task" [ 1207.194143] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1207.297159] env[62627]: DEBUG nova.objects.base [None req-6a7107c0-31d8-4861-b65d-f4bcfba86935 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=62627) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1207.300802] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-583ffa0b-fbdb-4ff5-9fd7-8a3e4bfb435f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.328119] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c079602f-f9d7-4caf-ae35-c44f0a33631d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.335420] env[62627]: DEBUG oslo_vmware.api [None req-6a7107c0-31d8-4861-b65d-f4bcfba86935 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1207.335420] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]526d67c5-7965-9938-b656-91148ead2118" [ 1207.335420] env[62627]: _type = "Task" [ 1207.335420] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1207.348231] env[62627]: DEBUG oslo_vmware.api [None req-6a7107c0-31d8-4861-b65d-f4bcfba86935 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]526d67c5-7965-9938-b656-91148ead2118, 'name': SearchDatastore_Task, 'duration_secs': 0.007689} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1207.348460] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6a7107c0-31d8-4861-b65d-f4bcfba86935 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1207.489660] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28de2910-eed2-41ea-953e-da1802bad931 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.498369] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2abeb02-054c-4ca3-ab3f-df8a6fc1b8d6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.531580] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c912d0a5-e681-4835-97b7-eb8f6e99353f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.540610] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52af973a-1acc-4b73-a93b-8e511d190bc8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.557184] env[62627]: DEBUG nova.compute.provider_tree [None req-7061ffc6-121c-40f8-b94f-c1227b20e94d tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1207.581946] env[62627]: INFO nova.compute.manager [-] [instance: 722d7bce-f0d7-4891-aa48-0ead10786a6b] Took 1.47 seconds to deallocate network for instance. [ 1207.613620] env[62627]: DEBUG oslo_vmware.api [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195204, 'name': PowerOnVM_Task, 'duration_secs': 0.823856} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1207.613844] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1207.614074] env[62627]: INFO nova.compute.manager [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Took 9.28 seconds to spawn the instance on the hypervisor. 
[ 1207.614333] env[62627]: DEBUG nova.compute.manager [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1207.615242] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e803561-abf4-4b8f-bc6d-0d1e058e702c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.695098] env[62627]: DEBUG oslo_vmware.api [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195205, 'name': PowerOffVM_Task, 'duration_secs': 0.265555} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1207.699084] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1207.699417] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Updating instance '36a8d4d3-d6b6-4b42-9514-0e29ee45aea9' progress to 17 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1207.710010] env[62627]: DEBUG oslo_vmware.api [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195206, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.061248] env[62627]: DEBUG nova.scheduler.client.report [None req-7061ffc6-121c-40f8-b94f-c1227b20e94d tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1208.088349] env[62627]: DEBUG oslo_concurrency.lockutils [None req-47083594-eb12-4bb4-a09f-9c04167581bb tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1208.136692] env[62627]: INFO nova.compute.manager [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Took 19.42 seconds to build instance. [ 1208.207774] env[62627]: DEBUG nova.virt.hardware [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1208.208067] env[62627]: DEBUG nova.virt.hardware [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1208.208232] env[62627]: DEBUG nova.virt.hardware [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1208.208414] env[62627]: DEBUG nova.virt.hardware [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1208.208560] env[62627]: DEBUG nova.virt.hardware [None req-89dad047-a7ed-43d4-8e87-e722c0067cec 
tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1208.208740] env[62627]: DEBUG nova.virt.hardware [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1208.208968] env[62627]: DEBUG nova.virt.hardware [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1208.209147] env[62627]: DEBUG nova.virt.hardware [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1208.209317] env[62627]: DEBUG nova.virt.hardware [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1208.209479] env[62627]: DEBUG nova.virt.hardware [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1208.209653] env[62627]: DEBUG nova.virt.hardware [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1208.215032] env[62627]: DEBUG oslo_vmware.api [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195206, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.617684} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1208.215032] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-61825a46-189f-4834-bcfe-f0a9d311c6b7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.227706] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 1109aae0-62d2-4c55-8c68-f224f3de8b55/1109aae0-62d2-4c55-8c68-f224f3de8b55.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1208.228087] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1208.228769] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8a17c960-bc48-433a-a008-0a3baee1e076 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.238414] env[62627]: DEBUG oslo_vmware.api [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Waiting for the task: (returnval){ [ 1208.238414] env[62627]: value = "task-2195207" [ 1208.238414] env[62627]: _type = "Task" [ 1208.238414] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1208.238690] env[62627]: DEBUG oslo_vmware.api [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1208.238690] env[62627]: value = "task-2195208" [ 1208.238690] env[62627]: _type = "Task" [ 1208.238690] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1208.249726] env[62627]: DEBUG oslo_vmware.api [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195207, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.253000] env[62627]: DEBUG oslo_vmware.api [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195208, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.566065] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7061ffc6-121c-40f8-b94f-c1227b20e94d tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.848s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1208.571692] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 4.164s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1208.571692] env[62627]: DEBUG nova.objects.instance [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62627) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1208.606268] env[62627]: INFO nova.scheduler.client.report [None req-7061ffc6-121c-40f8-b94f-c1227b20e94d tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Deleted allocations for instance 367a83b9-79f8-43b2-91dd-40d13193fa40 [ 1208.628832] env[62627]: DEBUG nova.compute.manager [req-90a59d71-64ed-49c2-a567-487c41d743c2 req-d4d75489-5828-4645-a94c-8a1c0a4cc0fb service nova] [instance: 30db4963-095b-4297-ad7c-0606451bd2b6] Received event network-changed-c9e055e0-0774-4741-a16c-139233d6803d {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1208.629479] env[62627]: DEBUG nova.compute.manager [req-90a59d71-64ed-49c2-a567-487c41d743c2 req-d4d75489-5828-4645-a94c-8a1c0a4cc0fb service nova] [instance: 30db4963-095b-4297-ad7c-0606451bd2b6] Refreshing instance network info cache due to event network-changed-c9e055e0-0774-4741-a16c-139233d6803d. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1208.629479] env[62627]: DEBUG oslo_concurrency.lockutils [req-90a59d71-64ed-49c2-a567-487c41d743c2 req-d4d75489-5828-4645-a94c-8a1c0a4cc0fb service nova] Acquiring lock "refresh_cache-30db4963-095b-4297-ad7c-0606451bd2b6" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1208.629479] env[62627]: DEBUG oslo_concurrency.lockutils [req-90a59d71-64ed-49c2-a567-487c41d743c2 req-d4d75489-5828-4645-a94c-8a1c0a4cc0fb service nova] Acquired lock "refresh_cache-30db4963-095b-4297-ad7c-0606451bd2b6" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1208.629648] env[62627]: DEBUG nova.network.neutron [req-90a59d71-64ed-49c2-a567-487c41d743c2 req-d4d75489-5828-4645-a94c-8a1c0a4cc0fb service nova] [instance: 30db4963-095b-4297-ad7c-0606451bd2b6] Refreshing network info cache for port c9e055e0-0774-4741-a16c-139233d6803d {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1208.638482] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a241d935-21a7-455c-8bd5-5e84db96ce79 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lock "b68042fd-4955-41e4-b14f-1f8268ace377" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.950s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1208.752333] env[62627]: DEBUG oslo_vmware.api [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195207, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.136133} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1208.755343] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1208.755653] env[62627]: DEBUG oslo_vmware.api [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195208, 'name': ReconfigVM_Task, 'duration_secs': 0.245093} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1208.756364] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5aa1d415-20ad-44c4-ba94-5e4f137e0501 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.758819] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Updating instance '36a8d4d3-d6b6-4b42-9514-0e29ee45aea9' progress to 33 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1208.784189] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Reconfiguring VM instance instance-00000064 to attach disk [datastore2] 1109aae0-62d2-4c55-8c68-f224f3de8b55/1109aae0-62d2-4c55-8c68-f224f3de8b55.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1208.784900] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-472c6b8e-cf4f-43fb-b352-1cc20c6c4232 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.808397] env[62627]: DEBUG oslo_vmware.api [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Waiting for the task: (returnval){ [ 1208.808397] env[62627]: value = "task-2195209" [ 1208.808397] env[62627]: _type = "Task" [ 1208.808397] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1208.818593] env[62627]: DEBUG oslo_vmware.api [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195209, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.832042] env[62627]: DEBUG oslo_concurrency.lockutils [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Acquiring lock "refresh_cache-b282093d-8013-436d-94d2-bc26161456ea" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1208.832244] env[62627]: DEBUG oslo_concurrency.lockutils [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Acquired lock "refresh_cache-b282093d-8013-436d-94d2-bc26161456ea" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1208.832437] env[62627]: DEBUG nova.network.neutron [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1209.116710] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7061ffc6-121c-40f8-b94f-c1227b20e94d tempest-ServersWithSpecificFlavorTestJSON-1227304686 tempest-ServersWithSpecificFlavorTestJSON-1227304686-project-member] Lock "367a83b9-79f8-43b2-91dd-40d13193fa40" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.128s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1209.265443] env[62627]: DEBUG nova.virt.hardware [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1209.265701] env[62627]: DEBUG nova.virt.hardware [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1209.265863] env[62627]: DEBUG nova.virt.hardware [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1209.266059] env[62627]: DEBUG nova.virt.hardware [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1209.266222] env[62627]: DEBUG nova.virt.hardware [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1209.266404] env[62627]: DEBUG nova.virt.hardware [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1209.266621] env[62627]: DEBUG nova.virt.hardware [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1209.266778] env[62627]: DEBUG nova.virt.hardware [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1209.266947] env[62627]: DEBUG nova.virt.hardware [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1209.267125] env[62627]: DEBUG nova.virt.hardware [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1209.267304] env[62627]: DEBUG nova.virt.hardware [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1209.272925] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Reconfiguring VM instance instance-00000060 to detach disk 2000 {{(pid=62627) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1209.273562] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-510182b8-5a18-4ca0-83f3-25f40ffaeae9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.296100] env[62627]: DEBUG oslo_vmware.api [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1209.296100] env[62627]: value = "task-2195210" [ 1209.296100] env[62627]: _type = "Task" [ 1209.296100] env[62627]: } 
to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1209.304831] env[62627]: DEBUG oslo_vmware.api [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195210, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.318653] env[62627]: DEBUG oslo_vmware.api [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195209, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.524676] env[62627]: DEBUG nova.network.neutron [req-90a59d71-64ed-49c2-a567-487c41d743c2 req-d4d75489-5828-4645-a94c-8a1c0a4cc0fb service nova] [instance: 30db4963-095b-4297-ad7c-0606451bd2b6] Updated VIF entry in instance network info cache for port c9e055e0-0774-4741-a16c-139233d6803d. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1209.525097] env[62627]: DEBUG nova.network.neutron [req-90a59d71-64ed-49c2-a567-487c41d743c2 req-d4d75489-5828-4645-a94c-8a1c0a4cc0fb service nova] [instance: 30db4963-095b-4297-ad7c-0606451bd2b6] Updating instance_info_cache with network_info: [{"id": "c9e055e0-0774-4741-a16c-139233d6803d", "address": "fa:16:3e:5d:44:b9", "network": {"id": "76eae841-5dda-4384-92ba-55247808e11f", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1004379592-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.203", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f41b0c171180469e9092b8a8da697a78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc9e055e0-07", "ovs_interfaceid": "c9e055e0-0774-4741-a16c-139233d6803d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1209.583188] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c29c63c6-b1a2-4d59-9d11-087fcb1a36f2 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.014s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1209.584288] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6a7107c0-31d8-4861-b65d-f4bcfba86935 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 2.236s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1209.612978] env[62627]: DEBUG nova.network.neutron [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Updating instance_info_cache with network_info: [{"id": "823150e9-f3ee-44ab-a36b-c8abdca8b70e", "address": "fa:16:3e:85:08:41", "network": {"id": "280614ef-fbde-41d9-927f-7471cc50aa14", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-970093695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97d5055e1a674886a161a96b0e095451", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8233b50c-be21-441a-a357-a29138a95b8b", "external-id": "nsx-vlan-transportzone-496", "segmentation_id": 496, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap823150e9-f3", "ovs_interfaceid": "823150e9-f3ee-44ab-a36b-c8abdca8b70e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1209.809061] env[62627]: DEBUG oslo_vmware.api [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195210, 'name': ReconfigVM_Task, 'duration_secs': 0.427163} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1209.813017] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Reconfigured VM instance instance-00000060 to detach disk 2000 {{(pid=62627) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1209.813500] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-529c54cc-d952-4e2c-87ef-739b60107d6e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.829020] env[62627]: DEBUG oslo_vmware.api [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195209, 'name': ReconfigVM_Task, 'duration_secs': 0.906313} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1209.838100] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Reconfigured VM instance instance-00000064 to attach disk [datastore2] 1109aae0-62d2-4c55-8c68-f224f3de8b55/1109aae0-62d2-4c55-8c68-f224f3de8b55.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1209.846605] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Reconfiguring VM instance instance-00000060 to attach disk [datastore2] 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9/36a8d4d3-d6b6-4b42-9514-0e29ee45aea9.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1209.847037] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9319b4d2-b279-4faa-aab2-8c3a6c6f0b83 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.848951] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cde4c68d-7ee2-4bcc-b014-6a7bbbe55ab2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.867493] env[62627]: DEBUG oslo_vmware.api [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Waiting for the task: (returnval){ [ 1209.867493] env[62627]: value = "task-2195211" [ 1209.867493] env[62627]: _type = "Task" [ 1209.867493] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1209.869189] env[62627]: DEBUG oslo_vmware.api [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1209.869189] env[62627]: value = "task-2195212" [ 1209.869189] env[62627]: _type = "Task" [ 1209.869189] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1209.883030] env[62627]: DEBUG oslo_vmware.api [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195212, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.887414] env[62627]: DEBUG oslo_vmware.api [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195211, 'name': Rename_Task} progress is 6%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.028756] env[62627]: DEBUG oslo_concurrency.lockutils [req-90a59d71-64ed-49c2-a567-487c41d743c2 req-d4d75489-5828-4645-a94c-8a1c0a4cc0fb service nova] Releasing lock "refresh_cache-30db4963-095b-4297-ad7c-0606451bd2b6" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1210.029242] env[62627]: DEBUG nova.compute.manager [req-90a59d71-64ed-49c2-a567-487c41d743c2 req-d4d75489-5828-4645-a94c-8a1c0a4cc0fb service nova] [instance: b282093d-8013-436d-94d2-bc26161456ea] Received event network-vif-plugged-823150e9-f3ee-44ab-a36b-c8abdca8b70e {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1210.029504] env[62627]: DEBUG oslo_concurrency.lockutils [req-90a59d71-64ed-49c2-a567-487c41d743c2 req-d4d75489-5828-4645-a94c-8a1c0a4cc0fb service nova] Acquiring lock "b282093d-8013-436d-94d2-bc26161456ea-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1210.029803] env[62627]: DEBUG oslo_concurrency.lockutils [req-90a59d71-64ed-49c2-a567-487c41d743c2 req-d4d75489-5828-4645-a94c-8a1c0a4cc0fb service nova] Lock "b282093d-8013-436d-94d2-bc26161456ea-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1210.030048] env[62627]: DEBUG oslo_concurrency.lockutils [req-90a59d71-64ed-49c2-a567-487c41d743c2 req-d4d75489-5828-4645-a94c-8a1c0a4cc0fb service nova] Lock "b282093d-8013-436d-94d2-bc26161456ea-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1210.030282] env[62627]: DEBUG nova.compute.manager [req-90a59d71-64ed-49c2-a567-487c41d743c2 req-d4d75489-5828-4645-a94c-8a1c0a4cc0fb service nova] [instance: b282093d-8013-436d-94d2-bc26161456ea] No waiting events found dispatching network-vif-plugged-823150e9-f3ee-44ab-a36b-c8abdca8b70e {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1210.030515] env[62627]: WARNING nova.compute.manager [req-90a59d71-64ed-49c2-a567-487c41d743c2 req-d4d75489-5828-4645-a94c-8a1c0a4cc0fb service nova] [instance: b282093d-8013-436d-94d2-bc26161456ea] Received unexpected event network-vif-plugged-823150e9-f3ee-44ab-a36b-c8abdca8b70e for instance with vm_state shelved_offloaded and task_state spawning. 
[ 1210.116391] env[62627]: DEBUG oslo_concurrency.lockutils [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Releasing lock "refresh_cache-b282093d-8013-436d-94d2-bc26161456ea" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1210.147034] env[62627]: DEBUG nova.virt.hardware [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='aa0e09ab49b2708dd18bca3f61d3be64',container_format='bare',created_at=2025-01-30T20:38:23Z,direct_url=,disk_format='vmdk',id=0d9bb31a-1d73-47ee-a566-e579a137eff3,min_disk=1,min_ram=0,name='tempest-ServersNegativeTestJSON-server-1756924044-shelved',owner='97d5055e1a674886a161a96b0e095451',properties=ImageMetaProps,protected=,size=31594496,status='active',tags=,updated_at=2025-01-30T20:38:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1210.147320] env[62627]: DEBUG nova.virt.hardware [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1210.147484] env[62627]: DEBUG nova.virt.hardware [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1210.147732] env[62627]: DEBUG nova.virt.hardware [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1210.147911] env[62627]: DEBUG nova.virt.hardware [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1210.148088] env[62627]: DEBUG nova.virt.hardware [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1210.148313] env[62627]: DEBUG nova.virt.hardware [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1210.148479] env[62627]: DEBUG nova.virt.hardware [None 
req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1210.148651] env[62627]: DEBUG nova.virt.hardware [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1210.149068] env[62627]: DEBUG nova.virt.hardware [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1210.149658] env[62627]: DEBUG nova.virt.hardware [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1210.150571] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-babd1367-8aaa-4dc2-9647-fdd481db2961 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.164259] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea4daf3e-b63e-4400-84f4-03a7404ba048 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.181363] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:85:08:41', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8233b50c-be21-441a-a357-a29138a95b8b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '823150e9-f3ee-44ab-a36b-c8abdca8b70e', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1210.189054] env[62627]: DEBUG oslo.service.loopingcall [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1210.192139] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b282093d-8013-436d-94d2-bc26161456ea] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1210.192646] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c0070c5e-996a-426a-89f6-c218b10f60d4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.217046] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1210.217046] env[62627]: value = "task-2195213" [ 1210.217046] env[62627]: _type = "Task" [ 1210.217046] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1210.229033] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195213, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.338286] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1285861c-5e15-4824-bafb-8ea357656ae7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.346940] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1214a226-4f7b-49c2-aa58-b14aa9a1551b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.387736] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5301973-698c-4e27-b726-fb3ee1479540 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.396465] env[62627]: DEBUG oslo_vmware.api [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195211, 'name': Rename_Task, 'duration_secs': 0.157065} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1210.401237] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1210.401626] env[62627]: DEBUG oslo_vmware.api [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195212, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.401853] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fc2377e6-ff48-44c2-b2f0-51e9b614cdcf {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.404393] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41892ae8-6bb1-4358-80d7-829c866967ee {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.422093] env[62627]: DEBUG nova.compute.provider_tree [None req-6a7107c0-31d8-4861-b65d-f4bcfba86935 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1210.425180] env[62627]: DEBUG oslo_vmware.api [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Waiting for the task: (returnval){ [ 1210.425180] env[62627]: value = "task-2195214" [ 1210.425180] env[62627]: _type = "Task" [ 1210.425180] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1210.435541] env[62627]: DEBUG oslo_vmware.api [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195214, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.657925] env[62627]: DEBUG nova.compute.manager [req-39d72fa6-b10d-43c9-8299-007d6aed24ae req-6caa190a-4073-4754-91ee-70c3c8c193f3 service nova] [instance: b282093d-8013-436d-94d2-bc26161456ea] Received event network-changed-823150e9-f3ee-44ab-a36b-c8abdca8b70e {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1210.658375] env[62627]: DEBUG nova.compute.manager [req-39d72fa6-b10d-43c9-8299-007d6aed24ae req-6caa190a-4073-4754-91ee-70c3c8c193f3 service nova] [instance: b282093d-8013-436d-94d2-bc26161456ea] Refreshing instance network info cache due to event network-changed-823150e9-f3ee-44ab-a36b-c8abdca8b70e. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1210.658669] env[62627]: DEBUG oslo_concurrency.lockutils [req-39d72fa6-b10d-43c9-8299-007d6aed24ae req-6caa190a-4073-4754-91ee-70c3c8c193f3 service nova] Acquiring lock "refresh_cache-b282093d-8013-436d-94d2-bc26161456ea" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1210.658802] env[62627]: DEBUG oslo_concurrency.lockutils [req-39d72fa6-b10d-43c9-8299-007d6aed24ae req-6caa190a-4073-4754-91ee-70c3c8c193f3 service nova] Acquired lock "refresh_cache-b282093d-8013-436d-94d2-bc26161456ea" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1210.658976] env[62627]: DEBUG nova.network.neutron [req-39d72fa6-b10d-43c9-8299-007d6aed24ae req-6caa190a-4073-4754-91ee-70c3c8c193f3 service nova] [instance: b282093d-8013-436d-94d2-bc26161456ea] Refreshing network info cache for port 823150e9-f3ee-44ab-a36b-c8abdca8b70e {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1210.728355] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195213, 'name': CreateVM_Task, 'duration_secs': 0.381162} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1210.728506] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b282093d-8013-436d-94d2-bc26161456ea] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1210.729200] env[62627]: DEBUG oslo_concurrency.lockutils [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0d9bb31a-1d73-47ee-a566-e579a137eff3" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1210.729366] env[62627]: DEBUG oslo_concurrency.lockutils [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0d9bb31a-1d73-47ee-a566-e579a137eff3" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1210.729765] env[62627]: DEBUG oslo_concurrency.lockutils [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/0d9bb31a-1d73-47ee-a566-e579a137eff3" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1210.730144] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c78f0291-a60f-4dc4-a89d-3c47eb08a7c2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.735378] env[62627]: DEBUG oslo_vmware.api [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Waiting for the task: (returnval){ [ 1210.735378] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5217d42f-ce49-ccc0-ac59-263ff5b662c9" [ 1210.735378] env[62627]: _type = "Task" [ 1210.735378] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1210.744504] env[62627]: DEBUG oslo_vmware.api [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5217d42f-ce49-ccc0-ac59-263ff5b662c9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.891213] env[62627]: DEBUG oslo_vmware.api [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195212, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.927205] env[62627]: DEBUG nova.scheduler.client.report [None req-6a7107c0-31d8-4861-b65d-f4bcfba86935 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1210.942400] env[62627]: DEBUG oslo_vmware.api [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195214, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.246575] env[62627]: DEBUG oslo_concurrency.lockutils [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0d9bb31a-1d73-47ee-a566-e579a137eff3" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1211.246887] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Processing image 0d9bb31a-1d73-47ee-a566-e579a137eff3 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1211.247179] env[62627]: DEBUG oslo_concurrency.lockutils [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0d9bb31a-1d73-47ee-a566-e579a137eff3/0d9bb31a-1d73-47ee-a566-e579a137eff3.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1211.247371] env[62627]: DEBUG oslo_concurrency.lockutils [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0d9bb31a-1d73-47ee-a566-e579a137eff3/0d9bb31a-1d73-47ee-a566-e579a137eff3.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1211.247605] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1211.247920] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f21fb8fc-6c07-4ab1-a593-6d16e38e12f0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.257832] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1211.258078] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1211.258876] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-450fef18-3696-4b67-a6c9-e5d9361e2d29 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.264873] env[62627]: DEBUG oslo_vmware.api [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Waiting for the task: (returnval){ [ 1211.264873] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52ff2fd9-410c-4de9-fa69-3f36bc3af072" [ 1211.264873] env[62627]: _type = "Task" [ 1211.264873] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1211.273561] env[62627]: DEBUG oslo_vmware.api [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52ff2fd9-410c-4de9-fa69-3f36bc3af072, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.392577] env[62627]: DEBUG oslo_vmware.api [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195212, 'name': ReconfigVM_Task, 'duration_secs': 1.069599} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1211.392923] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Reconfigured VM instance instance-00000060 to attach disk [datastore2] 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9/36a8d4d3-d6b6-4b42-9514-0e29ee45aea9.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1211.393300] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Updating instance '36a8d4d3-d6b6-4b42-9514-0e29ee45aea9' progress to 50 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1211.447686] env[62627]: DEBUG oslo_vmware.api [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195214, 'name': PowerOnVM_Task, 'duration_secs': 0.775513} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1211.452085] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1211.452320] env[62627]: INFO nova.compute.manager [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Took 8.46 seconds to spawn the instance on the hypervisor. [ 1211.452503] env[62627]: DEBUG nova.compute.manager [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1211.453540] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-255efa77-0e5c-4c21-9d8a-892ab368dc82 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.779350] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Preparing fetch location {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1211.779648] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Fetch image to [datastore2] OSTACK_IMG_51ad4c3a-902f-470e-bfe7-61161a046617/OSTACK_IMG_51ad4c3a-902f-470e-bfe7-61161a046617.vmdk {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1211.779850] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Downloading stream optimized image 0d9bb31a-1d73-47ee-a566-e579a137eff3 to [datastore2] OSTACK_IMG_51ad4c3a-902f-470e-bfe7-61161a046617/OSTACK_IMG_51ad4c3a-902f-470e-bfe7-61161a046617.vmdk on the data store datastore2 as vApp {{(pid=62627) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1211.780192] env[62627]: DEBUG nova.virt.vmwareapi.images [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Downloading image file data 0d9bb31a-1d73-47ee-a566-e579a137eff3 to the ESX as VM named 'OSTACK_IMG_51ad4c3a-902f-470e-bfe7-61161a046617' {{(pid=62627) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1211.829138] env[62627]: DEBUG nova.network.neutron [req-39d72fa6-b10d-43c9-8299-007d6aed24ae req-6caa190a-4073-4754-91ee-70c3c8c193f3 service nova] [instance: b282093d-8013-436d-94d2-bc26161456ea] Updated VIF entry in instance network info cache for port 
823150e9-f3ee-44ab-a36b-c8abdca8b70e. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1211.829482] env[62627]: DEBUG nova.network.neutron [req-39d72fa6-b10d-43c9-8299-007d6aed24ae req-6caa190a-4073-4754-91ee-70c3c8c193f3 service nova] [instance: b282093d-8013-436d-94d2-bc26161456ea] Updating instance_info_cache with network_info: [{"id": "823150e9-f3ee-44ab-a36b-c8abdca8b70e", "address": "fa:16:3e:85:08:41", "network": {"id": "280614ef-fbde-41d9-927f-7471cc50aa14", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-970093695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97d5055e1a674886a161a96b0e095451", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8233b50c-be21-441a-a357-a29138a95b8b", "external-id": "nsx-vlan-transportzone-496", "segmentation_id": 496, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap823150e9-f3", "ovs_interfaceid": "823150e9-f3ee-44ab-a36b-c8abdca8b70e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1211.874371] env[62627]: DEBUG oslo_vmware.rw_handles [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1211.874371] env[62627]: value = "resgroup-9" [ 1211.874371] env[62627]: _type = "ResourcePool" [ 1211.874371] env[62627]: }. {{(pid=62627) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1211.874371] env[62627]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-7e72084f-6ad9-4a12-a340-5c8521bb48ab {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.895709] env[62627]: DEBUG oslo_vmware.rw_handles [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Lease: (returnval){ [ 1211.895709] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5234447e-b7bf-c0e4-9ecd-8ab5a31e1028" [ 1211.895709] env[62627]: _type = "HttpNfcLease" [ 1211.895709] env[62627]: } obtained for vApp import into resource pool (val){ [ 1211.895709] env[62627]: value = "resgroup-9" [ 1211.895709] env[62627]: _type = "ResourcePool" [ 1211.895709] env[62627]: }. 
{{(pid=62627) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1211.896185] env[62627]: DEBUG oslo_vmware.api [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Waiting for the lease: (returnval){ [ 1211.896185] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5234447e-b7bf-c0e4-9ecd-8ab5a31e1028" [ 1211.896185] env[62627]: _type = "HttpNfcLease" [ 1211.896185] env[62627]: } to be ready. {{(pid=62627) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1211.902079] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1844531-e22e-4152-9127-b0a2ab68443d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.911030] env[62627]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1211.911030] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5234447e-b7bf-c0e4-9ecd-8ab5a31e1028" [ 1211.911030] env[62627]: _type = "HttpNfcLease" [ 1211.911030] env[62627]: } is initializing. {{(pid=62627) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1211.930021] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-870c17c5-5026-46fc-81aa-e14e20a81fb1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.948262] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Updating instance '36a8d4d3-d6b6-4b42-9514-0e29ee45aea9' progress to 67 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1211.953314] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6a7107c0-31d8-4861-b65d-f4bcfba86935 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.369s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1211.953659] env[62627]: DEBUG nova.compute.manager [None req-6a7107c0-31d8-4861-b65d-f4bcfba86935 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Resized/migrated instance is powered off. Setting vm_state to 'stopped'. 
{{(pid=62627) _confirm_resize /opt/stack/nova/nova/compute/manager.py:5376}} [ 1211.956418] env[62627]: DEBUG oslo_concurrency.lockutils [None req-47083594-eb12-4bb4-a09f-9c04167581bb tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.868s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1211.956747] env[62627]: DEBUG nova.objects.instance [None req-47083594-eb12-4bb4-a09f-9c04167581bb tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Lazy-loading 'resources' on Instance uuid 722d7bce-f0d7-4891-aa48-0ead10786a6b {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1211.974121] env[62627]: INFO nova.compute.manager [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Took 14.76 seconds to build instance. [ 1212.332634] env[62627]: DEBUG oslo_concurrency.lockutils [req-39d72fa6-b10d-43c9-8299-007d6aed24ae req-6caa190a-4073-4754-91ee-70c3c8c193f3 service nova] Releasing lock "refresh_cache-b282093d-8013-436d-94d2-bc26161456ea" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1212.332938] env[62627]: DEBUG nova.compute.manager [req-39d72fa6-b10d-43c9-8299-007d6aed24ae req-6caa190a-4073-4754-91ee-70c3c8c193f3 service nova] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Received event network-changed-9a79e592-1487-4b96-93f2-66df8b0224a4 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1212.333190] env[62627]: DEBUG nova.compute.manager [req-39d72fa6-b10d-43c9-8299-007d6aed24ae req-6caa190a-4073-4754-91ee-70c3c8c193f3 service nova] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Refreshing instance network info cache due to event network-changed-9a79e592-1487-4b96-93f2-66df8b0224a4. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1212.333420] env[62627]: DEBUG oslo_concurrency.lockutils [req-39d72fa6-b10d-43c9-8299-007d6aed24ae req-6caa190a-4073-4754-91ee-70c3c8c193f3 service nova] Acquiring lock "refresh_cache-b68042fd-4955-41e4-b14f-1f8268ace377" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1212.333566] env[62627]: DEBUG oslo_concurrency.lockutils [req-39d72fa6-b10d-43c9-8299-007d6aed24ae req-6caa190a-4073-4754-91ee-70c3c8c193f3 service nova] Acquired lock "refresh_cache-b68042fd-4955-41e4-b14f-1f8268ace377" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1212.333729] env[62627]: DEBUG nova.network.neutron [req-39d72fa6-b10d-43c9-8299-007d6aed24ae req-6caa190a-4073-4754-91ee-70c3c8c193f3 service nova] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Refreshing network info cache for port 9a79e592-1487-4b96-93f2-66df8b0224a4 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1212.405230] env[62627]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1212.405230] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5234447e-b7bf-c0e4-9ecd-8ab5a31e1028" [ 1212.405230] env[62627]: _type = "HttpNfcLease" [ 1212.405230] env[62627]: } is initializing. {{(pid=62627) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1212.476508] env[62627]: DEBUG oslo_concurrency.lockutils [None req-94fd37cb-1bf0-4a2a-b9bb-62e8d018ae92 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lock "1109aae0-62d2-4c55-8c68-f224f3de8b55" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.270s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1212.495889] env[62627]: DEBUG nova.network.neutron [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Port d885a4e0-fbdd-41cf-9e49-1e16cd392ecf binding to destination host cpu-1 is already ACTIVE {{(pid=62627) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1212.553742] env[62627]: INFO nova.scheduler.client.report [None req-6a7107c0-31d8-4861-b65d-f4bcfba86935 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Deleted allocation for migration 41203292-9d7e-4bc8-8a03-a07c60121f37 [ 1212.696120] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b49397b4-33d5-467d-942e-70894df01a62 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.704607] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b736eb3e-53fb-4d9d-a49a-a2921ce9a38f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.741364] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bc7a9f8-bec3-4b03-b187-be2d9014d4a2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.750475] env[62627]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2fb68c1-b05d-408c-9487-dec71c37d02b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.767080] env[62627]: DEBUG nova.compute.provider_tree [None req-47083594-eb12-4bb4-a09f-9c04167581bb tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1212.861029] env[62627]: DEBUG nova.objects.instance [None req-04a55e73-02a7-4ab6-acc5-d6b2d972799b tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lazy-loading 'flavor' on Instance uuid d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1212.911608] env[62627]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1212.911608] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5234447e-b7bf-c0e4-9ecd-8ab5a31e1028" [ 1212.911608] env[62627]: _type = "HttpNfcLease" [ 1212.911608] env[62627]: } is initializing. {{(pid=62627) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1212.927267] env[62627]: DEBUG nova.compute.manager [req-c0beba6c-2714-414f-9ce3-6e06268f820a req-5f3f0324-2e6b-4fcd-9f58-34ae2c985698 service nova] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Received event network-changed-58815871-fdac-4406-a61e-4d7fca125122 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1212.927542] env[62627]: DEBUG nova.compute.manager [req-c0beba6c-2714-414f-9ce3-6e06268f820a req-5f3f0324-2e6b-4fcd-9f58-34ae2c985698 service nova] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Refreshing instance network info cache due to event network-changed-58815871-fdac-4406-a61e-4d7fca125122. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1212.927855] env[62627]: DEBUG oslo_concurrency.lockutils [req-c0beba6c-2714-414f-9ce3-6e06268f820a req-5f3f0324-2e6b-4fcd-9f58-34ae2c985698 service nova] Acquiring lock "refresh_cache-c7e3246d-ffe7-4d81-a2ec-c5878649ed6b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1212.928030] env[62627]: DEBUG oslo_concurrency.lockutils [req-c0beba6c-2714-414f-9ce3-6e06268f820a req-5f3f0324-2e6b-4fcd-9f58-34ae2c985698 service nova] Acquired lock "refresh_cache-c7e3246d-ffe7-4d81-a2ec-c5878649ed6b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1212.928203] env[62627]: DEBUG nova.network.neutron [req-c0beba6c-2714-414f-9ce3-6e06268f820a req-5f3f0324-2e6b-4fcd-9f58-34ae2c985698 service nova] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Refreshing network info cache for port 58815871-fdac-4406-a61e-4d7fca125122 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1213.063096] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6a7107c0-31d8-4861-b65d-f4bcfba86935 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lock "d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 8.545s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1213.262364] env[62627]: DEBUG nova.network.neutron [req-39d72fa6-b10d-43c9-8299-007d6aed24ae req-6caa190a-4073-4754-91ee-70c3c8c193f3 service nova] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Updated VIF entry in instance network info cache for port 9a79e592-1487-4b96-93f2-66df8b0224a4. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1213.262825] env[62627]: DEBUG nova.network.neutron [req-39d72fa6-b10d-43c9-8299-007d6aed24ae req-6caa190a-4073-4754-91ee-70c3c8c193f3 service nova] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Updating instance_info_cache with network_info: [{"id": "9a79e592-1487-4b96-93f2-66df8b0224a4", "address": "fa:16:3e:56:2b:23", "network": {"id": "da26ed97-a6a7-47f1-b6c0-d1b2f45ad202", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-265963183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97c11d791dcf4f94a8e8bda7a6f014cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6eaa481-1f92-4851-b98e-09ed0daad7cb", "external-id": "nsx-vlan-transportzone-636", "segmentation_id": 636, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a79e592-14", "ovs_interfaceid": "9a79e592-1487-4b96-93f2-66df8b0224a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1213.270546] env[62627]: DEBUG nova.scheduler.client.report [None req-47083594-eb12-4bb4-a09f-9c04167581bb tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1213.365047] env[62627]: DEBUG oslo_concurrency.lockutils [None req-04a55e73-02a7-4ab6-acc5-d6b2d972799b tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquiring lock "refresh_cache-d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1213.365257] env[62627]: DEBUG oslo_concurrency.lockutils [None req-04a55e73-02a7-4ab6-acc5-d6b2d972799b tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquired lock "refresh_cache-d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1213.365435] env[62627]: DEBUG nova.network.neutron [None req-04a55e73-02a7-4ab6-acc5-d6b2d972799b tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Building network info cache for instance {{(pid=62627) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2067}} [ 1213.365634] env[62627]: DEBUG nova.objects.instance [None req-04a55e73-02a7-4ab6-acc5-d6b2d972799b tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lazy-loading 'info_cache' on Instance uuid d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1213.406120] env[62627]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1213.406120] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5234447e-b7bf-c0e4-9ecd-8ab5a31e1028" [ 1213.406120] env[62627]: _type = "HttpNfcLease" [ 1213.406120] env[62627]: } is initializing. {{(pid=62627) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1213.524401] env[62627]: DEBUG oslo_concurrency.lockutils [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquiring lock "36a8d4d3-d6b6-4b42-9514-0e29ee45aea9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1213.524651] env[62627]: DEBUG oslo_concurrency.lockutils [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "36a8d4d3-d6b6-4b42-9514-0e29ee45aea9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1213.524828] env[62627]: DEBUG oslo_concurrency.lockutils [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "36a8d4d3-d6b6-4b42-9514-0e29ee45aea9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1213.709124] env[62627]: DEBUG nova.network.neutron [req-c0beba6c-2714-414f-9ce3-6e06268f820a req-5f3f0324-2e6b-4fcd-9f58-34ae2c985698 service nova] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Updated VIF entry in instance network info cache for port 58815871-fdac-4406-a61e-4d7fca125122. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1213.709525] env[62627]: DEBUG nova.network.neutron [req-c0beba6c-2714-414f-9ce3-6e06268f820a req-5f3f0324-2e6b-4fcd-9f58-34ae2c985698 service nova] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Updating instance_info_cache with network_info: [{"id": "58815871-fdac-4406-a61e-4d7fca125122", "address": "fa:16:3e:69:fc:43", "network": {"id": "a1c5c227-8f1f-4881-a6d8-f446b64d9846", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1052922661-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e4f7ad3ebcf4c4d97bdbce58eec5a09", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "023d6500-887e-4dc4-bec5-06b40450d9c0", "external-id": "nsx-vlan-transportzone-108", "segmentation_id": 108, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58815871-fd", "ovs_interfaceid": "58815871-fdac-4406-a61e-4d7fca125122", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1213.765375] env[62627]: DEBUG oslo_concurrency.lockutils [req-39d72fa6-b10d-43c9-8299-007d6aed24ae req-6caa190a-4073-4754-91ee-70c3c8c193f3 service nova] Releasing lock "refresh_cache-b68042fd-4955-41e4-b14f-1f8268ace377" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1213.775434] env[62627]: DEBUG oslo_concurrency.lockutils [None req-47083594-eb12-4bb4-a09f-9c04167581bb tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.819s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1213.804164] env[62627]: INFO nova.scheduler.client.report [None req-47083594-eb12-4bb4-a09f-9c04167581bb tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Deleted allocations for instance 722d7bce-f0d7-4891-aa48-0ead10786a6b [ 1213.871094] env[62627]: DEBUG nova.objects.base [None req-04a55e73-02a7-4ab6-acc5-d6b2d972799b tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=62627) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1213.907754] env[62627]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1213.907754] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5234447e-b7bf-c0e4-9ecd-8ab5a31e1028" [ 1213.907754] env[62627]: _type = "HttpNfcLease" [ 1213.907754] env[62627]: } is initializing. 
{{(pid=62627) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1214.220429] env[62627]: DEBUG oslo_concurrency.lockutils [req-c0beba6c-2714-414f-9ce3-6e06268f820a req-5f3f0324-2e6b-4fcd-9f58-34ae2c985698 service nova] Releasing lock "refresh_cache-c7e3246d-ffe7-4d81-a2ec-c5878649ed6b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1214.316302] env[62627]: DEBUG oslo_concurrency.lockutils [None req-47083594-eb12-4bb4-a09f-9c04167581bb tempest-ServerMetadataNegativeTestJSON-1118921947 tempest-ServerMetadataNegativeTestJSON-1118921947-project-member] Lock "722d7bce-f0d7-4891-aa48-0ead10786a6b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.893s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1214.406223] env[62627]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1214.406223] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5234447e-b7bf-c0e4-9ecd-8ab5a31e1028" [ 1214.406223] env[62627]: _type = "HttpNfcLease" [ 1214.406223] env[62627]: } is initializing. {{(pid=62627) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1214.634430] env[62627]: DEBUG oslo_concurrency.lockutils [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquiring lock "refresh_cache-36a8d4d3-d6b6-4b42-9514-0e29ee45aea9" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1214.634430] env[62627]: DEBUG oslo_concurrency.lockutils [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquired lock "refresh_cache-36a8d4d3-d6b6-4b42-9514-0e29ee45aea9" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1214.634430] env[62627]: DEBUG nova.network.neutron [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1214.720619] env[62627]: INFO nova.compute.manager [None req-7d3a47c1-7fb1-4202-ab97-acfff8aedb4c tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Rebuilding instance [ 1214.781573] env[62627]: DEBUG nova.compute.manager [None req-7d3a47c1-7fb1-4202-ab97-acfff8aedb4c tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1214.781573] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83ef580c-1063-4c41-8e3d-c1459f26580d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.907042] env[62627]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1214.907042] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5234447e-b7bf-c0e4-9ecd-8ab5a31e1028" [ 1214.907042] 
env[62627]: _type = "HttpNfcLease" [ 1214.907042] env[62627]: } is initializing. {{(pid=62627) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1214.966064] env[62627]: DEBUG nova.compute.manager [req-26cadb7a-1b42-4c27-83af-877bb25b2757 req-508fbd38-b3f4-4b17-b9c9-21e6c5338834 service nova] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Received event network-changed-3166f86b-c852-4335-9c18-b4c68f51d8ab {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1214.966064] env[62627]: DEBUG nova.compute.manager [req-26cadb7a-1b42-4c27-83af-877bb25b2757 req-508fbd38-b3f4-4b17-b9c9-21e6c5338834 service nova] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Refreshing instance network info cache due to event network-changed-3166f86b-c852-4335-9c18-b4c68f51d8ab. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1214.966306] env[62627]: DEBUG oslo_concurrency.lockutils [req-26cadb7a-1b42-4c27-83af-877bb25b2757 req-508fbd38-b3f4-4b17-b9c9-21e6c5338834 service nova] Acquiring lock "refresh_cache-1109aae0-62d2-4c55-8c68-f224f3de8b55" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1214.966485] env[62627]: DEBUG oslo_concurrency.lockutils [req-26cadb7a-1b42-4c27-83af-877bb25b2757 req-508fbd38-b3f4-4b17-b9c9-21e6c5338834 service nova] Acquired lock "refresh_cache-1109aae0-62d2-4c55-8c68-f224f3de8b55" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1214.966813] env[62627]: DEBUG nova.network.neutron [req-26cadb7a-1b42-4c27-83af-877bb25b2757 req-508fbd38-b3f4-4b17-b9c9-21e6c5338834 service nova] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Refreshing network info cache for port 3166f86b-c852-4335-9c18-b4c68f51d8ab {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1215.006878] env[62627]: DEBUG nova.compute.manager [req-288fec43-97bf-4852-815b-32902cd08536 req-7cdcb010-9f40-4a9d-93d0-993cb619e922 service nova] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Received event network-changed-3166f86b-c852-4335-9c18-b4c68f51d8ab {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1215.007088] env[62627]: DEBUG nova.compute.manager [req-288fec43-97bf-4852-815b-32902cd08536 req-7cdcb010-9f40-4a9d-93d0-993cb619e922 service nova] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Refreshing instance network info cache due to event network-changed-3166f86b-c852-4335-9c18-b4c68f51d8ab. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1215.010911] env[62627]: DEBUG oslo_concurrency.lockutils [req-288fec43-97bf-4852-815b-32902cd08536 req-7cdcb010-9f40-4a9d-93d0-993cb619e922 service nova] Acquiring lock "refresh_cache-1109aae0-62d2-4c55-8c68-f224f3de8b55" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1215.037709] env[62627]: DEBUG nova.network.neutron [None req-04a55e73-02a7-4ab6-acc5-d6b2d972799b tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Updating instance_info_cache with network_info: [{"id": "332cfd7e-9934-429a-bc74-275d48f07fdb", "address": "fa:16:3e:68:4c:72", "network": {"id": "e5d8177b-3afb-46cb-88d9-d4777ffa4a39", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1184692155-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.160", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f3a36c557145e38c528a628b1f3b67", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a44713-0af1-486e-bc0d-00e03a769fa4", "external-id": "nsx-vlan-transportzone-420", "segmentation_id": 420, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap332cfd7e-99", "ovs_interfaceid": "332cfd7e-9934-429a-bc74-275d48f07fdb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1215.407648] env[62627]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1215.407648] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5234447e-b7bf-c0e4-9ecd-8ab5a31e1028" [ 1215.407648] env[62627]: _type = "HttpNfcLease" [ 1215.407648] env[62627]: } is ready. {{(pid=62627) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1215.408366] env[62627]: DEBUG oslo_vmware.rw_handles [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1215.408366] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5234447e-b7bf-c0e4-9ecd-8ab5a31e1028" [ 1215.408366] env[62627]: _type = "HttpNfcLease" [ 1215.408366] env[62627]: }. 
{{(pid=62627) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1215.409432] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff0784c7-c1fd-4a72-a69f-179c62711e3b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.417481] env[62627]: DEBUG oslo_vmware.rw_handles [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e69889-5034-5adb-bbc2-f61b51dab62f/disk-0.vmdk from lease info. {{(pid=62627) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1215.419080] env[62627]: DEBUG oslo_vmware.rw_handles [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Creating HTTP connection to write to file with size = 31594496 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e69889-5034-5adb-bbc2-f61b51dab62f/disk-0.vmdk. {{(pid=62627) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1215.490189] env[62627]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-82c1df37-4273-4bd6-897d-3c61d1632ce2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.542284] env[62627]: DEBUG oslo_concurrency.lockutils [None req-04a55e73-02a7-4ab6-acc5-d6b2d972799b tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Releasing lock "refresh_cache-d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1215.794338] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d3a47c1-7fb1-4202-ab97-acfff8aedb4c tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1215.794338] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b09cee8f-c68d-4af3-b181-76f84bf881b3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.802978] env[62627]: DEBUG oslo_vmware.api [None req-7d3a47c1-7fb1-4202-ab97-acfff8aedb4c tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Waiting for the task: (returnval){ [ 1215.802978] env[62627]: value = "task-2195216" [ 1215.802978] env[62627]: _type = "Task" [ 1215.802978] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1215.815580] env[62627]: DEBUG oslo_vmware.api [None req-7d3a47c1-7fb1-4202-ab97-acfff8aedb4c tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Task: {'id': task-2195216, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.887685] env[62627]: DEBUG nova.network.neutron [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Updating instance_info_cache with network_info: [{"id": "d885a4e0-fbdd-41cf-9e49-1e16cd392ecf", "address": "fa:16:3e:c6:8b:aa", "network": {"id": "f5de376b-cb15-4145-90b2-664ca4fa638e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1451402594-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74aed70194cf444884768a5c89fe8613", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "098df9b7-d759-47f7-b756-334848cb423b", "external-id": "nsx-vlan-transportzone-765", "segmentation_id": 765, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd885a4e0-fb", "ovs_interfaceid": "d885a4e0-fbdd-41cf-9e49-1e16cd392ecf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1215.960792] env[62627]: DEBUG nova.network.neutron [req-26cadb7a-1b42-4c27-83af-877bb25b2757 req-508fbd38-b3f4-4b17-b9c9-21e6c5338834 service nova] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Updated VIF entry in instance network info cache for port 3166f86b-c852-4335-9c18-b4c68f51d8ab. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1215.961702] env[62627]: DEBUG nova.network.neutron [req-26cadb7a-1b42-4c27-83af-877bb25b2757 req-508fbd38-b3f4-4b17-b9c9-21e6c5338834 service nova] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Updating instance_info_cache with network_info: [{"id": "3166f86b-c852-4335-9c18-b4c68f51d8ab", "address": "fa:16:3e:7b:bd:70", "network": {"id": "a1c5c227-8f1f-4881-a6d8-f446b64d9846", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1052922661-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e4f7ad3ebcf4c4d97bdbce58eec5a09", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "023d6500-887e-4dc4-bec5-06b40450d9c0", "external-id": "nsx-vlan-transportzone-108", "segmentation_id": 108, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3166f86b-c8", "ovs_interfaceid": "3166f86b-c852-4335-9c18-b4c68f51d8ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1216.315017] env[62627]: DEBUG oslo_vmware.api [None req-7d3a47c1-7fb1-4202-ab97-acfff8aedb4c tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Task: {'id': task-2195216, 'name': PowerOffVM_Task, 'duration_secs': 0.188511} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1216.315017] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d3a47c1-7fb1-4202-ab97-acfff8aedb4c tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1216.315017] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d3a47c1-7fb1-4202-ab97-acfff8aedb4c tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1216.315017] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-de38c696-f3f5-4fa6-ad46-719fcdf5e0cd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.325528] env[62627]: DEBUG oslo_vmware.api [None req-7d3a47c1-7fb1-4202-ab97-acfff8aedb4c tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Waiting for the task: (returnval){ [ 1216.325528] env[62627]: value = "task-2195217" [ 1216.325528] env[62627]: _type = "Task" [ 1216.325528] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1216.334856] env[62627]: DEBUG oslo_vmware.api [None req-7d3a47c1-7fb1-4202-ab97-acfff8aedb4c tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Task: {'id': task-2195217, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.393548] env[62627]: DEBUG oslo_concurrency.lockutils [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Releasing lock "refresh_cache-36a8d4d3-d6b6-4b42-9514-0e29ee45aea9" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1216.470361] env[62627]: DEBUG oslo_concurrency.lockutils [req-26cadb7a-1b42-4c27-83af-877bb25b2757 req-508fbd38-b3f4-4b17-b9c9-21e6c5338834 service nova] Releasing lock "refresh_cache-1109aae0-62d2-4c55-8c68-f224f3de8b55" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1216.471126] env[62627]: DEBUG oslo_concurrency.lockutils [req-288fec43-97bf-4852-815b-32902cd08536 req-7cdcb010-9f40-4a9d-93d0-993cb619e922 service nova] Acquired lock "refresh_cache-1109aae0-62d2-4c55-8c68-f224f3de8b55" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1216.471482] env[62627]: DEBUG nova.network.neutron [req-288fec43-97bf-4852-815b-32902cd08536 req-7cdcb010-9f40-4a9d-93d0-993cb619e922 service nova] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Refreshing network info cache for port 3166f86b-c852-4335-9c18-b4c68f51d8ab {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1216.549013] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-04a55e73-02a7-4ab6-acc5-d6b2d972799b tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1216.549383] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-23ffe59a-c24a-4a2a-92ce-6b25ce71d17e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.558644] env[62627]: DEBUG oslo_vmware.api [None req-04a55e73-02a7-4ab6-acc5-d6b2d972799b tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1216.558644] env[62627]: value = "task-2195218" [ 1216.558644] env[62627]: _type = "Task" [ 1216.558644] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1216.570339] env[62627]: DEBUG oslo_vmware.api [None req-04a55e73-02a7-4ab6-acc5-d6b2d972799b tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195218, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.845188] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d3a47c1-7fb1-4202-ab97-acfff8aedb4c tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] VM already powered off {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1216.846838] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d3a47c1-7fb1-4202-ab97-acfff8aedb4c tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Volume detach. Driver type: vmdk {{(pid=62627) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1216.847220] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d3a47c1-7fb1-4202-ab97-acfff8aedb4c tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447792', 'volume_id': '5211d219-5b96-4822-bdba-9d41bc2363b9', 'name': 'volume-5211d219-5b96-4822-bdba-9d41bc2363b9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c25fe185-2d6a-40a8-be06-84ee22a5464e', 'attached_at': '', 'detached_at': '', 'volume_id': '5211d219-5b96-4822-bdba-9d41bc2363b9', 'serial': '5211d219-5b96-4822-bdba-9d41bc2363b9'} {{(pid=62627) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1216.848783] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5b6927a-731a-49b4-b15a-642addc8fb2a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.881238] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e248586f-aadb-4fb8-9402-3e800368ac9f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.895154] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8456a36-f573-4be0-9277-de58394168f2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.926188] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de2ef109-12dc-461d-8315-6f150cf02ecb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.954423] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d3a47c1-7fb1-4202-ab97-acfff8aedb4c tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] The volume has not been displaced from its original location: [datastore2] volume-5211d219-5b96-4822-bdba-9d41bc2363b9/volume-5211d219-5b96-4822-bdba-9d41bc2363b9.vmdk. No consolidation needed. 
{{(pid=62627) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1216.960575] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d3a47c1-7fb1-4202-ab97-acfff8aedb4c tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Reconfiguring VM instance instance-0000005e to detach disk 2000 {{(pid=62627) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1216.968013] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1d73bba4-8e60-43b4-a79a-25c7d36d53ca {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.984339] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8a4e14c-2357-412d-ae81-e40363e25858 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.014969] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de0372aa-597a-40b6-bbaf-88bcf0f95e32 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.018144] env[62627]: DEBUG oslo_vmware.api [None req-7d3a47c1-7fb1-4202-ab97-acfff8aedb4c tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Waiting for the task: (returnval){ [ 1217.018144] env[62627]: value = "task-2195219" [ 1217.018144] env[62627]: _type = "Task" [ 1217.018144] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1217.025143] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Updating instance '36a8d4d3-d6b6-4b42-9514-0e29ee45aea9' progress to 83 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1217.035610] env[62627]: DEBUG oslo_vmware.api [None req-7d3a47c1-7fb1-4202-ab97-acfff8aedb4c tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Task: {'id': task-2195219, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.048540] env[62627]: DEBUG nova.compute.manager [req-6d199dcc-51dc-4e26-baf5-3cc3186f466f req-7bac1db2-a740-4c5a-ad5b-618e3ce200bc service nova] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Received event network-changed-58815871-fdac-4406-a61e-4d7fca125122 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1217.048830] env[62627]: DEBUG nova.compute.manager [req-6d199dcc-51dc-4e26-baf5-3cc3186f466f req-7bac1db2-a740-4c5a-ad5b-618e3ce200bc service nova] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Refreshing instance network info cache due to event network-changed-58815871-fdac-4406-a61e-4d7fca125122. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1217.049202] env[62627]: DEBUG oslo_concurrency.lockutils [req-6d199dcc-51dc-4e26-baf5-3cc3186f466f req-7bac1db2-a740-4c5a-ad5b-618e3ce200bc service nova] Acquiring lock "refresh_cache-c7e3246d-ffe7-4d81-a2ec-c5878649ed6b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1217.049454] env[62627]: DEBUG oslo_concurrency.lockutils [req-6d199dcc-51dc-4e26-baf5-3cc3186f466f req-7bac1db2-a740-4c5a-ad5b-618e3ce200bc service nova] Acquired lock "refresh_cache-c7e3246d-ffe7-4d81-a2ec-c5878649ed6b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1217.049718] env[62627]: DEBUG nova.network.neutron [req-6d199dcc-51dc-4e26-baf5-3cc3186f466f req-7bac1db2-a740-4c5a-ad5b-618e3ce200bc service nova] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Refreshing network info cache for port 58815871-fdac-4406-a61e-4d7fca125122 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1217.080023] env[62627]: DEBUG oslo_vmware.api [None req-04a55e73-02a7-4ab6-acc5-d6b2d972799b tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195218, 'name': PowerOnVM_Task, 'duration_secs': 0.503211} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1217.080211] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-04a55e73-02a7-4ab6-acc5-d6b2d972799b tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1217.080571] env[62627]: DEBUG nova.compute.manager [None req-04a55e73-02a7-4ab6-acc5-d6b2d972799b tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1217.081331] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ccfbf13-a217-4e94-80ec-408a4692c14b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.150839] env[62627]: DEBUG oslo_vmware.rw_handles [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Completed reading data from the image iterator. {{(pid=62627) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1217.151095] env[62627]: DEBUG oslo_vmware.rw_handles [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e69889-5034-5adb-bbc2-f61b51dab62f/disk-0.vmdk. 
{{(pid=62627) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1217.152424] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73c0802e-17e3-4876-9824-0acf721f93d3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.161066] env[62627]: DEBUG oslo_vmware.rw_handles [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e69889-5034-5adb-bbc2-f61b51dab62f/disk-0.vmdk is in state: ready. {{(pid=62627) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1217.161237] env[62627]: DEBUG oslo_vmware.rw_handles [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e69889-5034-5adb-bbc2-f61b51dab62f/disk-0.vmdk. {{(pid=62627) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1217.161512] env[62627]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-3af2cda4-bc4c-4217-900d-792bc251a1d4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.506210] env[62627]: DEBUG nova.network.neutron [req-288fec43-97bf-4852-815b-32902cd08536 req-7cdcb010-9f40-4a9d-93d0-993cb619e922 service nova] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Updated VIF entry in instance network info cache for port 3166f86b-c852-4335-9c18-b4c68f51d8ab. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1217.506824] env[62627]: DEBUG nova.network.neutron [req-288fec43-97bf-4852-815b-32902cd08536 req-7cdcb010-9f40-4a9d-93d0-993cb619e922 service nova] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Updating instance_info_cache with network_info: [{"id": "3166f86b-c852-4335-9c18-b4c68f51d8ab", "address": "fa:16:3e:7b:bd:70", "network": {"id": "a1c5c227-8f1f-4881-a6d8-f446b64d9846", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1052922661-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e4f7ad3ebcf4c4d97bdbce58eec5a09", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "023d6500-887e-4dc4-bec5-06b40450d9c0", "external-id": "nsx-vlan-transportzone-108", "segmentation_id": 108, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3166f86b-c8", "ovs_interfaceid": "3166f86b-c852-4335-9c18-b4c68f51d8ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1217.534029] env[62627]: DEBUG oslo_vmware.api [None req-7d3a47c1-7fb1-4202-ab97-acfff8aedb4c tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Task: {'id': task-2195219, 'name': ReconfigVM_Task, 'duration_secs': 0.217423} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1217.534029] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1217.534029] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d3a47c1-7fb1-4202-ab97-acfff8aedb4c tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Reconfigured VM instance instance-0000005e to detach disk 2000 {{(pid=62627) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1217.538769] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a24288c9-a2e8-4bf9-8030-c5c6effa8a6e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.541024] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4e68a40a-95cd-4169-a74b-63df76c729c1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.558045] env[62627]: DEBUG oslo_vmware.api [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1217.558045] env[62627]: value = "task-2195220" [ 1217.558045] env[62627]: _type = "Task" [ 1217.558045] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1217.559641] env[62627]: DEBUG oslo_vmware.api [None req-7d3a47c1-7fb1-4202-ab97-acfff8aedb4c tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Waiting for the task: (returnval){ [ 1217.559641] env[62627]: value = "task-2195221" [ 1217.559641] env[62627]: _type = "Task" [ 1217.559641] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1217.574230] env[62627]: DEBUG oslo_vmware.api [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195220, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.579715] env[62627]: DEBUG oslo_vmware.api [None req-7d3a47c1-7fb1-4202-ab97-acfff8aedb4c tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Task: {'id': task-2195221, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.639044] env[62627]: DEBUG oslo_vmware.rw_handles [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e69889-5034-5adb-bbc2-f61b51dab62f/disk-0.vmdk. 
{{(pid=62627) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1217.639044] env[62627]: INFO nova.virt.vmwareapi.images [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Downloaded image file data 0d9bb31a-1d73-47ee-a566-e579a137eff3 [ 1217.639044] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-502b2aaa-08c9-4dc8-bf2a-802d02ffe043 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.657834] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5773b419-9d91-4876-950a-62a97a97730b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.755874] env[62627]: INFO nova.virt.vmwareapi.images [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] The imported VM was unregistered [ 1217.758698] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Caching image {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1217.758987] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Creating directory with path [datastore2] devstack-image-cache_base/0d9bb31a-1d73-47ee-a566-e579a137eff3 {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1217.759360] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6f8754d7-73d6-4767-a9bf-5eb2b765460c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.793544] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Created directory with path [datastore2] devstack-image-cache_base/0d9bb31a-1d73-47ee-a566-e579a137eff3 {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1217.793761] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_51ad4c3a-902f-470e-bfe7-61161a046617/OSTACK_IMG_51ad4c3a-902f-470e-bfe7-61161a046617.vmdk to [datastore2] devstack-image-cache_base/0d9bb31a-1d73-47ee-a566-e579a137eff3/0d9bb31a-1d73-47ee-a566-e579a137eff3.vmdk. 
{{(pid=62627) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1217.794072] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-f63f381b-55f3-4cdb-b79a-86bca73d0fbe {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.804144] env[62627]: DEBUG oslo_vmware.api [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Waiting for the task: (returnval){ [ 1217.804144] env[62627]: value = "task-2195223" [ 1217.804144] env[62627]: _type = "Task" [ 1217.804144] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1217.813957] env[62627]: DEBUG oslo_vmware.api [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195223, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.936384] env[62627]: DEBUG nova.network.neutron [req-6d199dcc-51dc-4e26-baf5-3cc3186f466f req-7bac1db2-a740-4c5a-ad5b-618e3ce200bc service nova] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Updated VIF entry in instance network info cache for port 58815871-fdac-4406-a61e-4d7fca125122. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1217.936890] env[62627]: DEBUG nova.network.neutron [req-6d199dcc-51dc-4e26-baf5-3cc3186f466f req-7bac1db2-a740-4c5a-ad5b-618e3ce200bc service nova] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Updating instance_info_cache with network_info: [{"id": "58815871-fdac-4406-a61e-4d7fca125122", "address": "fa:16:3e:69:fc:43", "network": {"id": "a1c5c227-8f1f-4881-a6d8-f446b64d9846", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1052922661-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.196", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e4f7ad3ebcf4c4d97bdbce58eec5a09", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "023d6500-887e-4dc4-bec5-06b40450d9c0", "external-id": "nsx-vlan-transportzone-108", "segmentation_id": 108, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58815871-fd", "ovs_interfaceid": "58815871-fdac-4406-a61e-4d7fca125122", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1218.010098] env[62627]: DEBUG oslo_concurrency.lockutils [req-288fec43-97bf-4852-815b-32902cd08536 req-7cdcb010-9f40-4a9d-93d0-993cb619e922 service nova] Releasing lock "refresh_cache-1109aae0-62d2-4c55-8c68-f224f3de8b55" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1218.081025] env[62627]: DEBUG oslo_vmware.api 
[None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195220, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.083465] env[62627]: DEBUG oslo_vmware.api [None req-7d3a47c1-7fb1-4202-ab97-acfff8aedb4c tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Task: {'id': task-2195221, 'name': ReconfigVM_Task, 'duration_secs': 0.15566} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1218.084645] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d3a47c1-7fb1-4202-ab97-acfff8aedb4c tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447792', 'volume_id': '5211d219-5b96-4822-bdba-9d41bc2363b9', 'name': 'volume-5211d219-5b96-4822-bdba-9d41bc2363b9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c25fe185-2d6a-40a8-be06-84ee22a5464e', 'attached_at': '', 'detached_at': '', 'volume_id': '5211d219-5b96-4822-bdba-9d41bc2363b9', 'serial': '5211d219-5b96-4822-bdba-9d41bc2363b9'} {{(pid=62627) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1218.085095] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7d3a47c1-7fb1-4202-ab97-acfff8aedb4c tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1218.086508] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25b3b03b-6e92-4c99-a80c-52e994b7d8bd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.094304] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7d3a47c1-7fb1-4202-ab97-acfff8aedb4c tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1218.094828] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6850541e-3279-437d-9ef2-03ec852965b6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.174183] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7d3a47c1-7fb1-4202-ab97-acfff8aedb4c tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1218.174451] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7d3a47c1-7fb1-4202-ab97-acfff8aedb4c tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Deleting contents of the VM from datastore datastore2 {{(pid=62627) 
_destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1218.174632] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d3a47c1-7fb1-4202-ab97-acfff8aedb4c tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Deleting the datastore file [datastore2] c25fe185-2d6a-40a8-be06-84ee22a5464e {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1218.174912] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ff9ea315-a71e-4b3e-b8d3-48bb2512cd71 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.184115] env[62627]: DEBUG oslo_vmware.api [None req-7d3a47c1-7fb1-4202-ab97-acfff8aedb4c tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Waiting for the task: (returnval){ [ 1218.184115] env[62627]: value = "task-2195225" [ 1218.184115] env[62627]: _type = "Task" [ 1218.184115] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.194522] env[62627]: DEBUG oslo_vmware.api [None req-7d3a47c1-7fb1-4202-ab97-acfff8aedb4c tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Task: {'id': task-2195225, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.318831] env[62627]: DEBUG oslo_vmware.api [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195223, 'name': MoveVirtualDisk_Task} progress is 12%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.441559] env[62627]: DEBUG oslo_concurrency.lockutils [req-6d199dcc-51dc-4e26-baf5-3cc3186f466f req-7bac1db2-a740-4c5a-ad5b-618e3ce200bc service nova] Releasing lock "refresh_cache-c7e3246d-ffe7-4d81-a2ec-c5878649ed6b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1218.573639] env[62627]: DEBUG oslo_vmware.api [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195220, 'name': PowerOnVM_Task, 'duration_secs': 0.786885} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1218.573639] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1218.573639] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-89dad047-a7ed-43d4-8e87-e722c0067cec tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Updating instance '36a8d4d3-d6b6-4b42-9514-0e29ee45aea9' progress to 100 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1218.700528] env[62627]: DEBUG oslo_vmware.api [None req-7d3a47c1-7fb1-4202-ab97-acfff8aedb4c tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Task: {'id': task-2195225, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.822112] env[62627]: DEBUG oslo_vmware.api [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195223, 'name': MoveVirtualDisk_Task} progress is 32%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.028250] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8d983870-b855-437a-870d-14bed5fd002a tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquiring lock "d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1219.028441] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8d983870-b855-437a-870d-14bed5fd002a tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lock "d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1219.028490] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8d983870-b855-437a-870d-14bed5fd002a tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquiring lock "d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1219.032039] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8d983870-b855-437a-870d-14bed5fd002a tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lock "d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1219.032039] env[62627]: DEBUG oslo_concurrency.lockutils [None 
req-8d983870-b855-437a-870d-14bed5fd002a tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lock "d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1219.032039] env[62627]: INFO nova.compute.manager [None req-8d983870-b855-437a-870d-14bed5fd002a tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Terminating instance [ 1219.199950] env[62627]: DEBUG oslo_vmware.api [None req-7d3a47c1-7fb1-4202-ab97-acfff8aedb4c tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Task: {'id': task-2195225, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.826809} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1219.200348] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d3a47c1-7fb1-4202-ab97-acfff8aedb4c tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1219.200578] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7d3a47c1-7fb1-4202-ab97-acfff8aedb4c tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1219.200858] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7d3a47c1-7fb1-4202-ab97-acfff8aedb4c tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1219.273473] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d3a47c1-7fb1-4202-ab97-acfff8aedb4c tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Volume detach. Driver type: vmdk {{(pid=62627) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1219.273880] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-37c29444-ea9d-4885-877d-6683a5a90cd2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.291918] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfc9c266-b4a0-4b45-9c83-243942ef339d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.322772] env[62627]: DEBUG oslo_vmware.api [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195223, 'name': MoveVirtualDisk_Task} progress is 52%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.347174] env[62627]: ERROR nova.compute.manager [None req-7d3a47c1-7fb1-4202-ab97-acfff8aedb4c tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Failed to detach volume 5211d219-5b96-4822-bdba-9d41bc2363b9 from /dev/sda: nova.exception.InstanceNotFound: Instance c25fe185-2d6a-40a8-be06-84ee22a5464e could not be found. [ 1219.347174] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Traceback (most recent call last): [ 1219.347174] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] File "/opt/stack/nova/nova/compute/manager.py", line 4185, in _do_rebuild_instance [ 1219.347174] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] self.driver.rebuild(**kwargs) [ 1219.347174] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] File "/opt/stack/nova/nova/virt/driver.py", line 497, in rebuild [ 1219.347174] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] raise NotImplementedError() [ 1219.347174] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] NotImplementedError [ 1219.347174] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] [ 1219.347174] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] During handling of the above exception, another exception occurred: [ 1219.347174] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] [ 1219.347174] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Traceback (most recent call last): [ 1219.347174] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] File "/opt/stack/nova/nova/compute/manager.py", line 3608, in _detach_root_volume [ 1219.347174] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] self.driver.detach_volume(context, old_connection_info, [ 1219.347174] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 561, in detach_volume [ 1219.347174] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] return self._volumeops.detach_volume(connection_info, instance) [ 1219.347174] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 1219.347174] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] self._detach_volume_vmdk(connection_info, instance) [ 1219.347174] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 1219.347174] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 1219.347174] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 1219.347174] env[62627]: ERROR nova.compute.manager 
[instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] stable_ref.fetch_moref(session) [ 1219.347174] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 1219.347174] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] raise exception.InstanceNotFound(instance_id=self._uuid) [ 1219.347174] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] nova.exception.InstanceNotFound: Instance c25fe185-2d6a-40a8-be06-84ee22a5464e could not be found. [ 1219.347174] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] [ 1219.505065] env[62627]: DEBUG nova.compute.utils [None req-7d3a47c1-7fb1-4202-ab97-acfff8aedb4c tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Build of instance c25fe185-2d6a-40a8-be06-84ee22a5464e aborted: Failed to rebuild volume backed instance. {{(pid=62627) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1219.507454] env[62627]: ERROR nova.compute.manager [None req-7d3a47c1-7fb1-4202-ab97-acfff8aedb4c tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Setting instance vm_state to ERROR: nova.exception.BuildAbortException: Build of instance c25fe185-2d6a-40a8-be06-84ee22a5464e aborted: Failed to rebuild volume backed instance. [ 1219.507454] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Traceback (most recent call last): [ 1219.507454] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] File "/opt/stack/nova/nova/compute/manager.py", line 4185, in _do_rebuild_instance [ 1219.507454] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] self.driver.rebuild(**kwargs) [ 1219.507454] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] File "/opt/stack/nova/nova/virt/driver.py", line 497, in rebuild [ 1219.507454] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] raise NotImplementedError() [ 1219.507454] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] NotImplementedError [ 1219.507454] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] [ 1219.507454] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] During handling of the above exception, another exception occurred: [ 1219.507454] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] [ 1219.507454] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Traceback (most recent call last): [ 1219.507454] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] File "/opt/stack/nova/nova/compute/manager.py", line 3643, in _rebuild_volume_backed_instance [ 1219.507454] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] self._detach_root_volume(context, instance, root_bdm) [ 1219.507454] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] File "/opt/stack/nova/nova/compute/manager.py", line 3622, in _detach_root_volume [ 1219.507454] 
env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] with excutils.save_and_reraise_exception(): [ 1219.507454] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1219.507454] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] self.force_reraise() [ 1219.507454] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1219.507454] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] raise self.value [ 1219.507454] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] File "/opt/stack/nova/nova/compute/manager.py", line 3608, in _detach_root_volume [ 1219.507454] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] self.driver.detach_volume(context, old_connection_info, [ 1219.507454] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 561, in detach_volume [ 1219.507454] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] return self._volumeops.detach_volume(connection_info, instance) [ 1219.507454] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 1219.507454] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] self._detach_volume_vmdk(connection_info, instance) [ 1219.507454] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 1219.507454] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 1219.507454] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 1219.507454] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] stable_ref.fetch_moref(session) [ 1219.507454] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 1219.507454] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] raise exception.InstanceNotFound(instance_id=self._uuid) [ 1219.507454] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] nova.exception.InstanceNotFound: Instance c25fe185-2d6a-40a8-be06-84ee22a5464e could not be found. 
[ 1219.507454] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] [ 1219.507454] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] During handling of the above exception, another exception occurred: [ 1219.507454] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] [ 1219.507454] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Traceback (most recent call last): [ 1219.507454] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] File "/opt/stack/nova/nova/compute/manager.py", line 11382, in _error_out_instance_on_exception [ 1219.507454] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] yield [ 1219.507454] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] File "/opt/stack/nova/nova/compute/manager.py", line 3911, in rebuild_instance [ 1219.507454] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] self._do_rebuild_instance_with_claim( [ 1219.508779] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] File "/opt/stack/nova/nova/compute/manager.py", line 3997, in _do_rebuild_instance_with_claim [ 1219.508779] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] self._do_rebuild_instance( [ 1219.508779] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] File "/opt/stack/nova/nova/compute/manager.py", line 4189, in _do_rebuild_instance [ 1219.508779] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] self._rebuild_default_impl(**kwargs) [ 1219.508779] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] File "/opt/stack/nova/nova/compute/manager.py", line 3766, in _rebuild_default_impl [ 1219.508779] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] self._rebuild_volume_backed_instance( [ 1219.508779] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] File "/opt/stack/nova/nova/compute/manager.py", line 3658, in _rebuild_volume_backed_instance [ 1219.508779] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] raise exception.BuildAbortException( [ 1219.508779] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] nova.exception.BuildAbortException: Build of instance c25fe185-2d6a-40a8-be06-84ee22a5464e aborted: Failed to rebuild volume backed instance. [ 1219.508779] env[62627]: ERROR nova.compute.manager [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] [ 1219.542308] env[62627]: DEBUG nova.compute.manager [None req-8d983870-b855-437a-870d-14bed5fd002a tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Start destroying the instance on the hypervisor. 
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1219.542308] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8d983870-b855-437a-870d-14bed5fd002a tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1219.543163] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4b1bd7f-3068-42ef-806c-9588f8ab81a8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.555672] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d983870-b855-437a-870d-14bed5fd002a tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1219.556986] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-729baa74-aa95-4ed2-a50d-9c3abbe8a4e8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.569384] env[62627]: DEBUG oslo_vmware.api [None req-8d983870-b855-437a-870d-14bed5fd002a tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1219.569384] env[62627]: value = "task-2195226" [ 1219.569384] env[62627]: _type = "Task" [ 1219.569384] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1219.585394] env[62627]: DEBUG oslo_vmware.api [None req-8d983870-b855-437a-870d-14bed5fd002a tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195226, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.715367] env[62627]: DEBUG oslo_concurrency.lockutils [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Acquiring lock "4a4b57c8-251c-4ca8-b662-b96214110979" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1219.715698] env[62627]: DEBUG oslo_concurrency.lockutils [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Lock "4a4b57c8-251c-4ca8-b662-b96214110979" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1219.827115] env[62627]: DEBUG oslo_vmware.api [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195223, 'name': MoveVirtualDisk_Task} progress is 74%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.081627] env[62627]: DEBUG oslo_vmware.api [None req-8d983870-b855-437a-870d-14bed5fd002a tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195226, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.219601] env[62627]: DEBUG nova.compute.manager [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] [instance: 4a4b57c8-251c-4ca8-b662-b96214110979] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1220.326945] env[62627]: DEBUG oslo_vmware.api [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195223, 'name': MoveVirtualDisk_Task} progress is 94%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.582334] env[62627]: DEBUG oslo_vmware.api [None req-8d983870-b855-437a-870d-14bed5fd002a tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195226, 'name': PowerOffVM_Task, 'duration_secs': 0.960604} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1220.583047] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d983870-b855-437a-870d-14bed5fd002a tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1220.583124] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8d983870-b855-437a-870d-14bed5fd002a tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1220.583352] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c4e30684-119c-4fca-a0fb-d86d8b65ae7f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.659075] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8d983870-b855-437a-870d-14bed5fd002a tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1220.659075] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8d983870-b855-437a-870d-14bed5fd002a tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1220.659075] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d983870-b855-437a-870d-14bed5fd002a tempest-ServerActionsTestOtherB-764568701 
tempest-ServerActionsTestOtherB-764568701-project-member] Deleting the datastore file [datastore2] d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1220.659291] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-931f5e39-ea3b-427b-b721-a113aa478944 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.678312] env[62627]: DEBUG oslo_vmware.api [None req-8d983870-b855-437a-870d-14bed5fd002a tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1220.678312] env[62627]: value = "task-2195228" [ 1220.678312] env[62627]: _type = "Task" [ 1220.678312] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1220.688829] env[62627]: DEBUG oslo_vmware.api [None req-8d983870-b855-437a-870d-14bed5fd002a tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195228, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.746397] env[62627]: DEBUG oslo_concurrency.lockutils [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1220.746691] env[62627]: DEBUG oslo_concurrency.lockutils [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1220.748930] env[62627]: INFO nova.compute.claims [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] [instance: 4a4b57c8-251c-4ca8-b662-b96214110979] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1220.824154] env[62627]: DEBUG oslo_vmware.api [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195223, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.747971} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1220.824154] env[62627]: INFO nova.virt.vmwareapi.ds_util [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_51ad4c3a-902f-470e-bfe7-61161a046617/OSTACK_IMG_51ad4c3a-902f-470e-bfe7-61161a046617.vmdk to [datastore2] devstack-image-cache_base/0d9bb31a-1d73-47ee-a566-e579a137eff3/0d9bb31a-1d73-47ee-a566-e579a137eff3.vmdk. 
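The MoveVirtualDisk_Task and DeleteDatastoreFile_Task records above follow the polling pattern visible throughout this log: the driver submits a long-running vCenter task, then wait_for_task / _poll_task re-reads the task state periodically, logging "progress is N%" until the task reaches a terminal state. The snippet below is a minimal, self-contained sketch of that loop; the TaskPoller-style names (FakeTask, wait_for_task, TaskFailed) are illustrative stand-ins, not the oslo.vmware API.

```python
import time


class TaskFailed(Exception):
    """Raised when a long-running task finishes in an error state."""


class FakeTask:
    """Illustrative stand-in for a vCenter task handle (not the real API)."""

    def __init__(self, steps):
        self._steps = steps          # list of (state, progress) tuples
        self._i = 0

    def poll(self):
        state, progress = self._steps[min(self._i, len(self._steps) - 1)]
        self._i += 1
        return state, progress


def wait_for_task(task, interval=0.5):
    """Poll `task` until it reaches a terminal state.

    Mirrors the log pattern: each poll either reports "progress is N%"
    or ends with "completed successfully" / an error.
    """
    while True:
        state, progress = task.poll()
        if state == "running":
            print(f"progress is {progress}%")
        elif state == "success":
            print("completed successfully")
            return
        elif state == "error":
            raise TaskFailed(f"task failed at {progress}%")
        time.sleep(interval)


if __name__ == "__main__":
    # Simulate the MoveVirtualDisk_Task seen above: 52% -> 74% -> 94% -> done.
    task = FakeTask([("running", 52), ("running", 74),
                     ("running", 94), ("success", 100)])
    wait_for_task(task, interval=0.01)
```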
[ 1220.824154] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Cleaning up location [datastore2] OSTACK_IMG_51ad4c3a-902f-470e-bfe7-61161a046617 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1220.824154] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_51ad4c3a-902f-470e-bfe7-61161a046617 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1220.824360] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-04a0729c-89a3-47d3-83f5-6aead5e049df {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.833471] env[62627]: DEBUG oslo_vmware.api [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Waiting for the task: (returnval){ [ 1220.833471] env[62627]: value = "task-2195229" [ 1220.833471] env[62627]: _type = "Task" [ 1220.833471] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1220.846771] env[62627]: DEBUG oslo_vmware.api [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195229, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.063755] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ccc29339-3d27-4102-bee3-6e838047e319 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquiring lock "36a8d4d3-d6b6-4b42-9514-0e29ee45aea9" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1221.064323] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ccc29339-3d27-4102-bee3-6e838047e319 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "36a8d4d3-d6b6-4b42-9514-0e29ee45aea9" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1221.064653] env[62627]: DEBUG nova.compute.manager [None req-ccc29339-3d27-4102-bee3-6e838047e319 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Going to confirm migration 6 {{(pid=62627) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1221.189281] env[62627]: DEBUG oslo_vmware.api [None req-8d983870-b855-437a-870d-14bed5fd002a tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195228, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.386772} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1221.189577] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d983870-b855-437a-870d-14bed5fd002a tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1221.189742] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8d983870-b855-437a-870d-14bed5fd002a tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1221.189921] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8d983870-b855-437a-870d-14bed5fd002a tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1221.190139] env[62627]: INFO nova.compute.manager [None req-8d983870-b855-437a-870d-14bed5fd002a tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Took 1.65 seconds to destroy the instance on the hypervisor. [ 1221.190401] env[62627]: DEBUG oslo.service.loopingcall [None req-8d983870-b855-437a-870d-14bed5fd002a tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1221.190602] env[62627]: DEBUG nova.compute.manager [-] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1221.190697] env[62627]: DEBUG nova.network.neutron [-] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1221.255097] env[62627]: DEBUG oslo_concurrency.lockutils [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Acquiring lock "f2eefd04-2e4d-48b2-9895-ae58070db970" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1221.255335] env[62627]: DEBUG oslo_concurrency.lockutils [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Lock "f2eefd04-2e4d-48b2-9895-ae58070db970" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1221.346274] env[62627]: DEBUG oslo_vmware.api [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195229, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.212887} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1221.346588] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1221.346792] env[62627]: DEBUG oslo_concurrency.lockutils [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0d9bb31a-1d73-47ee-a566-e579a137eff3/0d9bb31a-1d73-47ee-a566-e579a137eff3.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1221.347105] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/0d9bb31a-1d73-47ee-a566-e579a137eff3/0d9bb31a-1d73-47ee-a566-e579a137eff3.vmdk to [datastore2] b282093d-8013-436d-94d2-bc26161456ea/b282093d-8013-436d-94d2-bc26161456ea.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1221.347400] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d3ff0c1e-8629-4868-8864-c1869270eb9a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.355179] env[62627]: DEBUG oslo_vmware.api [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Waiting for the task: (returnval){ [ 1221.355179] env[62627]: value = "task-2195230" [ 1221.355179] env[62627]: _type = "Task" [ 1221.355179] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1221.363683] env[62627]: DEBUG oslo_vmware.api [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195230, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.524846] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7d3a47c1-7fb1-4202-ab97-acfff8aedb4c tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1221.672544] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ccc29339-3d27-4102-bee3-6e838047e319 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquiring lock "refresh_cache-36a8d4d3-d6b6-4b42-9514-0e29ee45aea9" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1221.672740] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ccc29339-3d27-4102-bee3-6e838047e319 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquired lock "refresh_cache-36a8d4d3-d6b6-4b42-9514-0e29ee45aea9" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1221.672923] env[62627]: DEBUG nova.network.neutron [None req-ccc29339-3d27-4102-bee3-6e838047e319 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1221.673203] env[62627]: DEBUG nova.objects.instance [None req-ccc29339-3d27-4102-bee3-6e838047e319 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lazy-loading 'info_cache' on Instance uuid 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1221.710590] env[62627]: DEBUG nova.compute.manager [req-53144fd0-58f4-43e5-a3c3-13cf51385fa6 req-8b79fa3f-86ea-4e40-bf9e-48724beca539 service nova] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Received event network-vif-deleted-332cfd7e-9934-429a-bc74-275d48f07fdb {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1221.710805] env[62627]: INFO nova.compute.manager [req-53144fd0-58f4-43e5-a3c3-13cf51385fa6 req-8b79fa3f-86ea-4e40-bf9e-48724beca539 service nova] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Neutron deleted interface 332cfd7e-9934-429a-bc74-275d48f07fdb; detaching it from the instance and deleting it from the info cache [ 1221.710977] env[62627]: DEBUG nova.network.neutron [req-53144fd0-58f4-43e5-a3c3-13cf51385fa6 req-8b79fa3f-86ea-4e40-bf9e-48724beca539 service nova] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1221.762015] env[62627]: DEBUG nova.compute.manager [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] [instance: f2eefd04-2e4d-48b2-9895-ae58070db970] Starting instance... 
{{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1221.866757] env[62627]: DEBUG oslo_vmware.api [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195230, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.007500] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44b7f30c-b428-4322-9934-e1e0b1c8a010 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.016075] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a11fe1b0-d1b5-4e7a-94ad-728a112a320c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.062370] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f203c4e-a7cd-426d-a7cf-21acde527889 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.071080] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e715605-e252-44f7-9bb7-180bf36d3117 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.098378] env[62627]: DEBUG nova.compute.provider_tree [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Updating inventory in ProviderTree for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1222.191402] env[62627]: DEBUG nova.network.neutron [-] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1222.213614] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-23aaff1a-87ce-4108-aa18-4aa8f2336ec6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.227670] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da32acb2-a8f1-4de0-a64b-0cc2c558bb96 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.270891] env[62627]: DEBUG nova.compute.manager [req-53144fd0-58f4-43e5-a3c3-13cf51385fa6 req-8b79fa3f-86ea-4e40-bf9e-48724beca539 service nova] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Detach interface failed, port_id=332cfd7e-9934-429a-bc74-275d48f07fdb, reason: Instance d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93 could not be found. 
{{(pid=62627) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1222.294403] env[62627]: DEBUG oslo_concurrency.lockutils [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1222.371012] env[62627]: DEBUG oslo_vmware.api [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195230, 'name': CopyVirtualDisk_Task} progress is 18%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.583689] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1aaba5e8-a913-4389-8d36-93d888907507 tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Acquiring lock "c25fe185-2d6a-40a8-be06-84ee22a5464e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1222.587023] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1aaba5e8-a913-4389-8d36-93d888907507 tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Lock "c25fe185-2d6a-40a8-be06-84ee22a5464e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.002s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1222.587023] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1aaba5e8-a913-4389-8d36-93d888907507 tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Acquiring lock "c25fe185-2d6a-40a8-be06-84ee22a5464e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1222.587023] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1aaba5e8-a913-4389-8d36-93d888907507 tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Lock "c25fe185-2d6a-40a8-be06-84ee22a5464e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1222.587023] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1aaba5e8-a913-4389-8d36-93d888907507 tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Lock "c25fe185-2d6a-40a8-be06-84ee22a5464e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1222.588984] env[62627]: INFO nova.compute.manager [None req-1aaba5e8-a913-4389-8d36-93d888907507 tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Terminating instance [ 1222.625981] env[62627]: ERROR nova.scheduler.client.report 
[None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] [req-c389b485-08aa-44f3-bfd8-e8257bcd8f24] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c17e7f32-68d8-4e2f-8979-15d701de02f7. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-c389b485-08aa-44f3-bfd8-e8257bcd8f24"}]} [ 1222.644051] env[62627]: DEBUG nova.scheduler.client.report [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Refreshing inventories for resource provider c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1222.659303] env[62627]: DEBUG nova.scheduler.client.report [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Updating ProviderTree inventory for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1222.659538] env[62627]: DEBUG nova.compute.provider_tree [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Updating inventory in ProviderTree for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1222.672673] env[62627]: DEBUG nova.scheduler.client.report [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Refreshing aggregate associations for resource provider c17e7f32-68d8-4e2f-8979-15d701de02f7, aggregates: None {{(pid=62627) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1222.693838] env[62627]: DEBUG nova.scheduler.client.report [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 
tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Refreshing trait associations for resource provider c17e7f32-68d8-4e2f-8979-15d701de02f7, traits: COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62627) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1222.698309] env[62627]: INFO nova.compute.manager [-] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Took 1.51 seconds to deallocate network for instance. [ 1222.874923] env[62627]: DEBUG oslo_vmware.api [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195230, 'name': CopyVirtualDisk_Task} progress is 40%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.929581] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f44b73a-3ef9-4a06-aa16-41fb50f61a0a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.941231] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dc00b66-8aa0-4e20-bf54-2b2dc00d2e1c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.983075] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-045b7dd1-a6e3-4d6c-95a5-918d5fcdccdd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.993644] env[62627]: DEBUG nova.network.neutron [None req-ccc29339-3d27-4102-bee3-6e838047e319 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Updating instance_info_cache with network_info: [{"id": "d885a4e0-fbdd-41cf-9e49-1e16cd392ecf", "address": "fa:16:3e:c6:8b:aa", "network": {"id": "f5de376b-cb15-4145-90b2-664ca4fa638e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1451402594-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74aed70194cf444884768a5c89fe8613", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "098df9b7-d759-47f7-b756-334848cb423b", "external-id": "nsx-vlan-transportzone-765", "segmentation_id": 765, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd885a4e0-fb", "ovs_interfaceid": "d885a4e0-fbdd-41cf-9e49-1e16cd392ecf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1222.995959] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1bc2d0a-91da-4fb3-9424-a7766c38b970 {{(pid=62627) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.014032] env[62627]: DEBUG nova.compute.provider_tree [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Updating inventory in ProviderTree for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1223.095091] env[62627]: DEBUG nova.compute.manager [None req-1aaba5e8-a913-4389-8d36-93d888907507 tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Start destroying the instance on the hypervisor. {{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1223.095091] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a3546b0b-9724-46d7-9f66-a7105595d7a1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.109034] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c31daf98-191a-40b2-b36f-cb1d80a504a9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.146559] env[62627]: WARNING nova.virt.vmwareapi.driver [None req-1aaba5e8-a913-4389-8d36-93d888907507 tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance c25fe185-2d6a-40a8-be06-84ee22a5464e could not be found. [ 1223.146850] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-1aaba5e8-a913-4389-8d36-93d888907507 tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1223.147241] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-33174984-0993-42a2-ab49-a821bf9511d8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.160956] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd3880fb-2988-4668-b5aa-0cdbb759e3b3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.199228] env[62627]: WARNING nova.virt.vmwareapi.vmops [None req-1aaba5e8-a913-4389-8d36-93d888907507 tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c25fe185-2d6a-40a8-be06-84ee22a5464e could not be found. 
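The WARNING just above ("Instance does not exists. Proceeding to delete instance properties on datastore") shows how the destroy path treats InstanceNotFound as non-fatal: the moref lookup raises, the driver logs a warning, and cleanup of datastore contents and network ports continues anyway, which is why the next records still report "Instance destroyed" and "Deallocating network for instance". Below is a rough sketch of that tolerance pattern; VMRegistry, destroy_instance, and the exception class are hypothetical names used for illustration only, not Nova's actual classes.

```python
import logging

LOG = logging.getLogger(__name__)


class InstanceNotFound(Exception):
    """Illustrative stand-in for nova.exception.InstanceNotFound."""


class VMRegistry:
    """Toy lookup table playing the role of the vCenter inventory."""

    def __init__(self, known_uuids):
        self._known = set(known_uuids)

    def fetch_moref(self, uuid):
        # Mirrors the vm_util.get_vm_ref() behaviour seen in the tracebacks:
        # raise when the backing VM no longer exists.
        if uuid not in self._known:
            raise InstanceNotFound(f"Instance {uuid} could not be found.")
        return f"vm-ref-{uuid}"


def destroy_instance(registry, uuid):
    """Destroy an instance, tolerating a missing backend VM.

    Matches the log flow: power off / unregister only if the VM exists,
    but always fall through to datastore and network cleanup.
    """
    try:
        vm_ref = registry.fetch_moref(uuid)
        LOG.debug("Powering off and unregistering %s", vm_ref)
    except InstanceNotFound:
        LOG.warning("Instance does not exist. Proceeding to delete "
                    "instance properties on datastore.")
    LOG.debug("Deleting instance contents from the datastore")
    LOG.debug("Deallocating network for instance %s", uuid)


if __name__ == "__main__":
    logging.basicConfig(level=logging.DEBUG)
    registry = VMRegistry(known_uuids=[])
    destroy_instance(registry, "c25fe185-2d6a-40a8-be06-84ee22a5464e")
```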
[ 1223.199568] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-1aaba5e8-a913-4389-8d36-93d888907507 tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1223.200534] env[62627]: INFO nova.compute.manager [None req-1aaba5e8-a913-4389-8d36-93d888907507 tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Took 0.11 seconds to destroy the instance on the hypervisor. [ 1223.200688] env[62627]: DEBUG oslo.service.loopingcall [None req-1aaba5e8-a913-4389-8d36-93d888907507 tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1223.201124] env[62627]: DEBUG nova.compute.manager [-] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1223.201197] env[62627]: DEBUG nova.network.neutron [-] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1223.208629] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8d983870-b855-437a-870d-14bed5fd002a tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1223.373189] env[62627]: DEBUG oslo_vmware.api [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195230, 'name': CopyVirtualDisk_Task} progress is 60%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.501389] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ccc29339-3d27-4102-bee3-6e838047e319 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Releasing lock "refresh_cache-36a8d4d3-d6b6-4b42-9514-0e29ee45aea9" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1223.503115] env[62627]: DEBUG nova.objects.instance [None req-ccc29339-3d27-4102-bee3-6e838047e319 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lazy-loading 'migration_context' on Instance uuid 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1223.562631] env[62627]: DEBUG nova.scheduler.client.report [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Updated inventory for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with generation 137 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1223.562761] env[62627]: DEBUG nova.compute.provider_tree [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Updating resource provider c17e7f32-68d8-4e2f-8979-15d701de02f7 generation from 137 to 138 during operation: update_inventory {{(pid=62627) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1223.562923] env[62627]: DEBUG nova.compute.provider_tree [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Updating inventory in ProviderTree for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1223.792532] env[62627]: DEBUG nova.compute.manager [req-7d4c84dd-489a-4245-9efd-7948899e9a10 req-0a2a25b4-6160-4ff9-a1c5-bdc6a2c27c17 service nova] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Received event network-vif-deleted-438ff9e4-843e-4e29-b42f-fd893752d160 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1223.792532] env[62627]: INFO nova.compute.manager [req-7d4c84dd-489a-4245-9efd-7948899e9a10 req-0a2a25b4-6160-4ff9-a1c5-bdc6a2c27c17 service nova] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Neutron deleted interface 438ff9e4-843e-4e29-b42f-fd893752d160; detaching it from the 
instance and deleting it from the info cache [ 1223.795040] env[62627]: DEBUG nova.network.neutron [req-7d4c84dd-489a-4245-9efd-7948899e9a10 req-0a2a25b4-6160-4ff9-a1c5-bdc6a2c27c17 service nova] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1223.878954] env[62627]: DEBUG oslo_vmware.api [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195230, 'name': CopyVirtualDisk_Task} progress is 83%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1224.005631] env[62627]: DEBUG nova.objects.base [None req-ccc29339-3d27-4102-bee3-6e838047e319 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Object Instance<36a8d4d3-d6b6-4b42-9514-0e29ee45aea9> lazy-loaded attributes: info_cache,migration_context {{(pid=62627) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1224.007390] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71bf3d99-11d9-43bd-9f14-6b8e3ffab7f9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.036967] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25c070b9-c498-43b0-a571-6c5dced2d5f9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.045850] env[62627]: DEBUG oslo_vmware.api [None req-ccc29339-3d27-4102-bee3-6e838047e319 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1224.045850] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52fc5ff4-b4e4-8088-1484-565cc383029b" [ 1224.045850] env[62627]: _type = "Task" [ 1224.045850] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1224.058977] env[62627]: DEBUG oslo_vmware.api [None req-ccc29339-3d27-4102-bee3-6e838047e319 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52fc5ff4-b4e4-8088-1484-565cc383029b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1224.068038] env[62627]: DEBUG oslo_concurrency.lockutils [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.321s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1224.068602] env[62627]: DEBUG nova.compute.manager [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] [instance: 4a4b57c8-251c-4ca8-b662-b96214110979] Start building networks asynchronously for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1224.071473] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7d3a47c1-7fb1-4202-ab97-acfff8aedb4c tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.547s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1224.271025] env[62627]: DEBUG nova.network.neutron [-] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1224.296569] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c22cf2d7-5826-4fa1-afd8-80b629f79d1d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.308722] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaa47b23-c7b4-427c-a4fe-62e92648e5ef {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.320415] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe06ca07-a70a-44c4-bbfb-d3c30b122685 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.329323] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0cf78f9-7128-464d-ab6c-c1528d4edbac {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.346058] env[62627]: DEBUG nova.compute.manager [req-7d4c84dd-489a-4245-9efd-7948899e9a10 req-0a2a25b4-6160-4ff9-a1c5-bdc6a2c27c17 service nova] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Detach interface failed, port_id=438ff9e4-843e-4e29-b42f-fd893752d160, reason: Instance c25fe185-2d6a-40a8-be06-84ee22a5464e could not be found. {{(pid=62627) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1224.376039] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-784be9a3-e7b5-4760-9d36-819212bd4b55 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.386335] env[62627]: DEBUG oslo_vmware.api [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195230, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.995046} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1224.386712] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/0d9bb31a-1d73-47ee-a566-e579a137eff3/0d9bb31a-1d73-47ee-a566-e579a137eff3.vmdk to [datastore2] b282093d-8013-436d-94d2-bc26161456ea/b282093d-8013-436d-94d2-bc26161456ea.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1224.388507] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27a5077c-6ca0-46d4-873b-16fc00ea0fef {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.392376] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-855fb6d0-b038-473d-99ce-1dbfd8aff20c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.408352] env[62627]: DEBUG nova.compute.provider_tree [None req-7d3a47c1-7fb1-4202-ab97-acfff8aedb4c tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Updating inventory in ProviderTree for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1224.428160] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Reconfiguring VM instance instance-00000058 to attach disk [datastore2] b282093d-8013-436d-94d2-bc26161456ea/b282093d-8013-436d-94d2-bc26161456ea.vmdk or device None with type streamOptimized {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1224.430830] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e7b56d8c-c033-481f-8756-31e6521ac7d1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.451169] env[62627]: DEBUG oslo_vmware.api [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Waiting for the task: (returnval){ [ 1224.451169] env[62627]: value = "task-2195231" [ 1224.451169] env[62627]: _type = "Task" [ 1224.451169] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1224.459655] env[62627]: DEBUG oslo_vmware.api [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195231, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1224.470622] env[62627]: DEBUG nova.scheduler.client.report [None req-7d3a47c1-7fb1-4202-ab97-acfff8aedb4c tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Updated inventory for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with generation 138 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1224.470863] env[62627]: DEBUG nova.compute.provider_tree [None req-7d3a47c1-7fb1-4202-ab97-acfff8aedb4c tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Updating resource provider c17e7f32-68d8-4e2f-8979-15d701de02f7 generation from 138 to 139 during operation: update_inventory {{(pid=62627) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1224.471061] env[62627]: DEBUG nova.compute.provider_tree [None req-7d3a47c1-7fb1-4202-ab97-acfff8aedb4c tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Updating inventory in ProviderTree for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1224.556706] env[62627]: DEBUG oslo_vmware.api [None req-ccc29339-3d27-4102-bee3-6e838047e319 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52fc5ff4-b4e4-8088-1484-565cc383029b, 'name': SearchDatastore_Task, 'duration_secs': 0.062961} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1224.557014] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ccc29339-3d27-4102-bee3-6e838047e319 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1224.576589] env[62627]: DEBUG nova.compute.utils [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1224.577974] env[62627]: DEBUG nova.compute.manager [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] [instance: 4a4b57c8-251c-4ca8-b662-b96214110979] Allocating IP information in the background. {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1224.578165] env[62627]: DEBUG nova.network.neutron [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] [instance: 4a4b57c8-251c-4ca8-b662-b96214110979] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1224.627677] env[62627]: DEBUG nova.policy [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c10adddb55ae4165abff78f71fdc5cb8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '65aa68a50f0b4f44855294959300bd33', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 1224.773796] env[62627]: INFO nova.compute.manager [-] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Took 1.57 seconds to deallocate network for instance. [ 1224.903410] env[62627]: DEBUG nova.network.neutron [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] [instance: 4a4b57c8-251c-4ca8-b662-b96214110979] Successfully created port: 200bba8f-ca22-4dfa-9492-f4d2f2e6f212 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1224.963955] env[62627]: DEBUG oslo_vmware.api [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195231, 'name': ReconfigVM_Task, 'duration_secs': 0.385568} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1224.963955] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Reconfigured VM instance instance-00000058 to attach disk [datastore2] b282093d-8013-436d-94d2-bc26161456ea/b282093d-8013-436d-94d2-bc26161456ea.vmdk or device None with type streamOptimized {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1224.963955] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-93c1641a-7704-414f-84cc-6a7ebc1ddfbc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.969881] env[62627]: DEBUG oslo_vmware.api [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Waiting for the task: (returnval){ [ 1224.969881] env[62627]: value = "task-2195232" [ 1224.969881] env[62627]: _type = "Task" [ 1224.969881] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1224.977100] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7d3a47c1-7fb1-4202-ab97-acfff8aedb4c tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.904s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1224.977100] env[62627]: INFO nova.compute.manager [None req-7d3a47c1-7fb1-4202-ab97-acfff8aedb4c tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Successfully reverted task state from rebuilding on failure for instance. [ 1224.981814] env[62627]: DEBUG oslo_concurrency.lockutils [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.688s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1224.983491] env[62627]: INFO nova.compute.claims [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] [instance: f2eefd04-2e4d-48b2-9895-ae58070db970] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1224.991938] env[62627]: DEBUG oslo_vmware.api [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195232, 'name': Rename_Task} progress is 14%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.086501] env[62627]: DEBUG nova.compute.manager [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] [instance: 4a4b57c8-251c-4ca8-b662-b96214110979] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1225.324868] env[62627]: INFO nova.compute.manager [None req-1aaba5e8-a913-4389-8d36-93d888907507 tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Took 0.55 seconds to detach 1 volumes for instance. [ 1225.327108] env[62627]: DEBUG nova.compute.manager [None req-1aaba5e8-a913-4389-8d36-93d888907507 tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Deleting volume: 5211d219-5b96-4822-bdba-9d41bc2363b9 {{(pid=62627) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 1225.481357] env[62627]: DEBUG oslo_vmware.api [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195232, 'name': Rename_Task, 'duration_secs': 0.150722} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1225.481634] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1225.481885] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4ad18d1b-a123-4519-ac89-5d1af9b0de52 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.491549] env[62627]: DEBUG oslo_vmware.api [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Waiting for the task: (returnval){ [ 1225.491549] env[62627]: value = "task-2195234" [ 1225.491549] env[62627]: _type = "Task" [ 1225.491549] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1225.502988] env[62627]: DEBUG oslo_vmware.api [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195234, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.870066] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1aaba5e8-a913-4389-8d36-93d888907507 tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1226.007608] env[62627]: DEBUG oslo_vmware.api [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195234, 'name': PowerOnVM_Task, 'duration_secs': 0.508341} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1226.008134] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1226.093504] env[62627]: DEBUG nova.compute.manager [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] [instance: 4a4b57c8-251c-4ca8-b662-b96214110979] Start spawning the instance on the hypervisor. {{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1226.116786] env[62627]: DEBUG nova.virt.hardware [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1226.117082] env[62627]: DEBUG nova.virt.hardware [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1226.117251] env[62627]: DEBUG nova.virt.hardware [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1226.117437] env[62627]: DEBUG nova.virt.hardware [None 
req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1226.117580] env[62627]: DEBUG nova.virt.hardware [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1226.117728] env[62627]: DEBUG nova.virt.hardware [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1226.118200] env[62627]: DEBUG nova.virt.hardware [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1226.118755] env[62627]: DEBUG nova.virt.hardware [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1226.118755] env[62627]: DEBUG nova.virt.hardware [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1226.118884] env[62627]: DEBUG nova.virt.hardware [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1226.119160] env[62627]: DEBUG nova.virt.hardware [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1226.120135] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eff51533-b0c6-4f9d-82cf-45d34c4b8cbb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.127204] env[62627]: DEBUG nova.compute.manager [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Checking state {{(pid=62627) _get_power_state 
/opt/stack/nova/nova/compute/manager.py:1798}} [ 1226.128176] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2be1f616-6c22-4d26-b37d-22c4ffb176a0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.138381] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5805a20-24ff-4ca0-ab16-6f8dd2116774 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.210551] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3da8f0a4-7713-4c14-9d79-82cae155f418 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.219744] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0156d87a-b0e1-481a-a956-538fb983ab03 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.252725] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7246324d-183d-48e6-8a2e-41c9338d3757 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.261312] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9fdc85b-c705-49b1-bc05-a16df40750bf {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.279020] env[62627]: DEBUG nova.compute.provider_tree [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1226.658450] env[62627]: DEBUG oslo_concurrency.lockutils [None req-db6fe6e2-1a5b-4694-ab8a-c03b03dea031 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Lock "b282093d-8013-436d-94d2-bc26161456ea" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 27.505s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1226.779036] env[62627]: DEBUG nova.scheduler.client.report [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1226.789561] env[62627]: DEBUG nova.compute.manager [req-c66d54ec-d574-44da-ab51-10340fe26d87 req-3695031e-9cc6-4c46-840f-20a219128dd4 service nova] [instance: 4a4b57c8-251c-4ca8-b662-b96214110979] Received event 
network-vif-plugged-200bba8f-ca22-4dfa-9492-f4d2f2e6f212 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1226.789775] env[62627]: DEBUG oslo_concurrency.lockutils [req-c66d54ec-d574-44da-ab51-10340fe26d87 req-3695031e-9cc6-4c46-840f-20a219128dd4 service nova] Acquiring lock "4a4b57c8-251c-4ca8-b662-b96214110979-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1226.789993] env[62627]: DEBUG oslo_concurrency.lockutils [req-c66d54ec-d574-44da-ab51-10340fe26d87 req-3695031e-9cc6-4c46-840f-20a219128dd4 service nova] Lock "4a4b57c8-251c-4ca8-b662-b96214110979-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1226.790263] env[62627]: DEBUG oslo_concurrency.lockutils [req-c66d54ec-d574-44da-ab51-10340fe26d87 req-3695031e-9cc6-4c46-840f-20a219128dd4 service nova] Lock "4a4b57c8-251c-4ca8-b662-b96214110979-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1226.790346] env[62627]: DEBUG nova.compute.manager [req-c66d54ec-d574-44da-ab51-10340fe26d87 req-3695031e-9cc6-4c46-840f-20a219128dd4 service nova] [instance: 4a4b57c8-251c-4ca8-b662-b96214110979] No waiting events found dispatching network-vif-plugged-200bba8f-ca22-4dfa-9492-f4d2f2e6f212 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1226.790487] env[62627]: WARNING nova.compute.manager [req-c66d54ec-d574-44da-ab51-10340fe26d87 req-3695031e-9cc6-4c46-840f-20a219128dd4 service nova] [instance: 4a4b57c8-251c-4ca8-b662-b96214110979] Received unexpected event network-vif-plugged-200bba8f-ca22-4dfa-9492-f4d2f2e6f212 for instance with vm_state building and task_state spawning. [ 1226.912794] env[62627]: DEBUG nova.network.neutron [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] [instance: 4a4b57c8-251c-4ca8-b662-b96214110979] Successfully updated port: 200bba8f-ca22-4dfa-9492-f4d2f2e6f212 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1227.284729] env[62627]: DEBUG oslo_concurrency.lockutils [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.303s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1227.285272] env[62627]: DEBUG nova.compute.manager [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] [instance: f2eefd04-2e4d-48b2-9895-ae58070db970] Start building networks asynchronously for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1227.288226] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8d983870-b855-437a-870d-14bed5fd002a tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.080s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1227.288579] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8d983870-b855-437a-870d-14bed5fd002a tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1227.291347] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ccc29339-3d27-4102-bee3-6e838047e319 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 2.734s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1227.324234] env[62627]: INFO nova.scheduler.client.report [None req-8d983870-b855-437a-870d-14bed5fd002a tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Deleted allocations for instance d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93 [ 1227.415415] env[62627]: DEBUG oslo_concurrency.lockutils [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Acquiring lock "refresh_cache-4a4b57c8-251c-4ca8-b662-b96214110979" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1227.415600] env[62627]: DEBUG oslo_concurrency.lockutils [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Acquired lock "refresh_cache-4a4b57c8-251c-4ca8-b662-b96214110979" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1227.415716] env[62627]: DEBUG nova.network.neutron [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] [instance: 4a4b57c8-251c-4ca8-b662-b96214110979] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1227.795394] env[62627]: DEBUG nova.compute.utils [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1227.800110] env[62627]: DEBUG nova.compute.manager [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] [instance: f2eefd04-2e4d-48b2-9895-ae58070db970] Allocating IP information in the background. 
{{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1227.800300] env[62627]: DEBUG nova.network.neutron [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] [instance: f2eefd04-2e4d-48b2-9895-ae58070db970] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1227.831887] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8d983870-b855-437a-870d-14bed5fd002a tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lock "d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.803s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1227.888727] env[62627]: DEBUG nova.policy [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5c5900db519d4c09a4a9d3f9c42dc136', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '625cd7e476754ce2b16cd6513f733e8a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 1227.994564] env[62627]: DEBUG nova.network.neutron [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] [instance: 4a4b57c8-251c-4ca8-b662-b96214110979] Instance cache missing network info. 
{{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1228.002401] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bc49e25-9697-496c-9fff-7e2a25d6943d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.013336] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-559fd304-0ece-4fea-8f64-bcd36b35fe0b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.057957] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d42f11a-f77c-4ddc-82f7-5a8d7a22f339 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.068734] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-906fa389-5a96-4239-bba0-88adbdd0a826 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.084720] env[62627]: DEBUG nova.compute.provider_tree [None req-ccc29339-3d27-4102-bee3-6e838047e319 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1228.208869] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c96dafe-9582-4fc1-a3a6-23b6554f5093 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.218107] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-b6636328-101f-44b5-b0bf-a108f9abcf4b tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Suspending the VM {{(pid=62627) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1228.218375] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-d54ac837-31e2-400c-ad63-8ca3898a8378 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.226488] env[62627]: DEBUG oslo_vmware.api [None req-b6636328-101f-44b5-b0bf-a108f9abcf4b tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Waiting for the task: (returnval){ [ 1228.226488] env[62627]: value = "task-2195235" [ 1228.226488] env[62627]: _type = "Task" [ 1228.226488] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1228.236066] env[62627]: DEBUG oslo_vmware.api [None req-b6636328-101f-44b5-b0bf-a108f9abcf4b tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195235, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.301337] env[62627]: DEBUG nova.compute.manager [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] [instance: f2eefd04-2e4d-48b2-9895-ae58070db970] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1228.332717] env[62627]: DEBUG nova.network.neutron [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] [instance: 4a4b57c8-251c-4ca8-b662-b96214110979] Updating instance_info_cache with network_info: [{"id": "200bba8f-ca22-4dfa-9492-f4d2f2e6f212", "address": "fa:16:3e:53:df:4c", "network": {"id": "a684ed8e-4084-4a9e-b3c8-7a17d7827291", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1686789109-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65aa68a50f0b4f44855294959300bd33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a9ee6f9-33be-4f58-8248-694024ec31d4", "external-id": "nsx-vlan-transportzone-581", "segmentation_id": 581, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap200bba8f-ca", "ovs_interfaceid": "200bba8f-ca22-4dfa-9492-f4d2f2e6f212", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1228.449202] env[62627]: DEBUG nova.network.neutron [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] [instance: f2eefd04-2e4d-48b2-9895-ae58070db970] Successfully created port: 98d518c4-51bc-4451-a4d2-6bd21f656d3d {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1228.589137] env[62627]: DEBUG nova.scheduler.client.report [None req-ccc29339-3d27-4102-bee3-6e838047e319 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1228.745668] env[62627]: DEBUG oslo_vmware.api [None req-b6636328-101f-44b5-b0bf-a108f9abcf4b tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195235, 'name': SuspendVM_Task} progress is 66%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.816500] env[62627]: DEBUG nova.compute.manager [req-b8239902-b069-4406-a469-417cf8ef186d req-63a2407f-a6d6-465d-ad0b-a5770befeeee service nova] [instance: 4a4b57c8-251c-4ca8-b662-b96214110979] Received event network-changed-200bba8f-ca22-4dfa-9492-f4d2f2e6f212 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1228.816753] env[62627]: DEBUG nova.compute.manager [req-b8239902-b069-4406-a469-417cf8ef186d req-63a2407f-a6d6-465d-ad0b-a5770befeeee service nova] [instance: 4a4b57c8-251c-4ca8-b662-b96214110979] Refreshing instance network info cache due to event network-changed-200bba8f-ca22-4dfa-9492-f4d2f2e6f212. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1228.816906] env[62627]: DEBUG oslo_concurrency.lockutils [req-b8239902-b069-4406-a469-417cf8ef186d req-63a2407f-a6d6-465d-ad0b-a5770befeeee service nova] Acquiring lock "refresh_cache-4a4b57c8-251c-4ca8-b662-b96214110979" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1228.838798] env[62627]: DEBUG oslo_concurrency.lockutils [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Releasing lock "refresh_cache-4a4b57c8-251c-4ca8-b662-b96214110979" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1228.838798] env[62627]: DEBUG nova.compute.manager [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] [instance: 4a4b57c8-251c-4ca8-b662-b96214110979] Instance network_info: |[{"id": "200bba8f-ca22-4dfa-9492-f4d2f2e6f212", "address": "fa:16:3e:53:df:4c", "network": {"id": "a684ed8e-4084-4a9e-b3c8-7a17d7827291", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1686789109-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65aa68a50f0b4f44855294959300bd33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a9ee6f9-33be-4f58-8248-694024ec31d4", "external-id": "nsx-vlan-transportzone-581", "segmentation_id": 581, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap200bba8f-ca", "ovs_interfaceid": "200bba8f-ca22-4dfa-9492-f4d2f2e6f212", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1228.839349] env[62627]: DEBUG oslo_concurrency.lockutils [req-b8239902-b069-4406-a469-417cf8ef186d req-63a2407f-a6d6-465d-ad0b-a5770befeeee service nova] Acquired lock "refresh_cache-4a4b57c8-251c-4ca8-b662-b96214110979" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1228.839461] env[62627]: DEBUG 
nova.network.neutron [req-b8239902-b069-4406-a469-417cf8ef186d req-63a2407f-a6d6-465d-ad0b-a5770befeeee service nova] [instance: 4a4b57c8-251c-4ca8-b662-b96214110979] Refreshing network info cache for port 200bba8f-ca22-4dfa-9492-f4d2f2e6f212 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1228.841738] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] [instance: 4a4b57c8-251c-4ca8-b662-b96214110979] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:53:df:4c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1a9ee6f9-33be-4f58-8248-694024ec31d4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '200bba8f-ca22-4dfa-9492-f4d2f2e6f212', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1228.850683] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Creating folder: Project (65aa68a50f0b4f44855294959300bd33). Parent ref: group-v447541. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1228.853800] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4300a19a-a106-4875-9c17-8f116b833f21 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.856148] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1228.856718] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1228.856845] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Starting heal instance info cache {{(pid=62627) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 1228.872362] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Created folder: Project (65aa68a50f0b4f44855294959300bd33) in parent group-v447541. [ 1228.872362] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Creating folder: Instances. Parent ref: group-v447815. 
{{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1228.872362] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7cf86f8a-d504-44e3-bc53-5e4eba9ceeb4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.885647] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Created folder: Instances in parent group-v447815. [ 1228.885911] env[62627]: DEBUG oslo.service.loopingcall [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1228.886502] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4a4b57c8-251c-4ca8-b662-b96214110979] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1228.886750] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0ed86c68-8194-4202-9523-bf0a7472e30f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.908810] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1228.908810] env[62627]: value = "task-2195238" [ 1228.908810] env[62627]: _type = "Task" [ 1228.908810] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1228.918386] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195238, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1229.032425] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquiring lock "1593ca50-bc97-464a-86ca-17995db15758" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1229.032425] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lock "1593ca50-bc97-464a-86ca-17995db15758" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1229.239383] env[62627]: DEBUG oslo_vmware.api [None req-b6636328-101f-44b5-b0bf-a108f9abcf4b tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195235, 'name': SuspendVM_Task, 'duration_secs': 0.718502} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1229.239653] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-b6636328-101f-44b5-b0bf-a108f9abcf4b tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Suspended the VM {{(pid=62627) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1229.239830] env[62627]: DEBUG nova.compute.manager [None req-b6636328-101f-44b5-b0bf-a108f9abcf4b tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1229.241092] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c67867b2-d3ad-44fb-8015-1ee9b1c9478a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.311692] env[62627]: DEBUG nova.compute.manager [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] [instance: f2eefd04-2e4d-48b2-9895-ae58070db970] Start spawning the instance on the hypervisor. {{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1229.350176] env[62627]: DEBUG nova.virt.hardware [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1229.351072] env[62627]: DEBUG nova.virt.hardware [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1229.351072] env[62627]: DEBUG nova.virt.hardware [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1229.351072] env[62627]: DEBUG nova.virt.hardware [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1229.351282] env[62627]: DEBUG nova.virt.hardware [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 
tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1229.351450] env[62627]: DEBUG nova.virt.hardware [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1229.351667] env[62627]: DEBUG nova.virt.hardware [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1229.351872] env[62627]: DEBUG nova.virt.hardware [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1229.352033] env[62627]: DEBUG nova.virt.hardware [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1229.352288] env[62627]: DEBUG nova.virt.hardware [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1229.352568] env[62627]: DEBUG nova.virt.hardware [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1229.358017] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9348c647-d565-4aea-ac5c-af4e1cbbaa07 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.365321] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ed915e8-f982-493b-b73f-e70568d5e000 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.420453] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195238, 'name': CreateVM_Task, 'duration_secs': 0.362909} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1229.420753] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4a4b57c8-251c-4ca8-b662-b96214110979] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1229.421446] env[62627]: DEBUG oslo_concurrency.lockutils [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1229.421658] env[62627]: DEBUG oslo_concurrency.lockutils [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1229.422038] env[62627]: DEBUG oslo_concurrency.lockutils [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1229.422332] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-31892eb2-7fd7-4d57-b8e7-80a504b8caf9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.427802] env[62627]: DEBUG oslo_vmware.api [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Waiting for the task: (returnval){ [ 1229.427802] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]522ae3f8-099c-1f48-2d81-9a184d6a5a39" [ 1229.427802] env[62627]: _type = "Task" [ 1229.427802] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1229.438575] env[62627]: DEBUG oslo_vmware.api [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]522ae3f8-099c-1f48-2d81-9a184d6a5a39, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1229.535494] env[62627]: DEBUG nova.compute.manager [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Starting instance... 
{{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1229.602397] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ccc29339-3d27-4102-bee3-6e838047e319 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.311s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1229.606497] env[62627]: DEBUG nova.network.neutron [req-b8239902-b069-4406-a469-417cf8ef186d req-63a2407f-a6d6-465d-ad0b-a5770befeeee service nova] [instance: 4a4b57c8-251c-4ca8-b662-b96214110979] Updated VIF entry in instance network info cache for port 200bba8f-ca22-4dfa-9492-f4d2f2e6f212. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1229.607213] env[62627]: DEBUG nova.network.neutron [req-b8239902-b069-4406-a469-417cf8ef186d req-63a2407f-a6d6-465d-ad0b-a5770befeeee service nova] [instance: 4a4b57c8-251c-4ca8-b662-b96214110979] Updating instance_info_cache with network_info: [{"id": "200bba8f-ca22-4dfa-9492-f4d2f2e6f212", "address": "fa:16:3e:53:df:4c", "network": {"id": "a684ed8e-4084-4a9e-b3c8-7a17d7827291", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1686789109-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65aa68a50f0b4f44855294959300bd33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a9ee6f9-33be-4f58-8248-694024ec31d4", "external-id": "nsx-vlan-transportzone-581", "segmentation_id": 581, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap200bba8f-ca", "ovs_interfaceid": "200bba8f-ca22-4dfa-9492-f4d2f2e6f212", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1229.608042] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1aaba5e8-a913-4389-8d36-93d888907507 tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.738s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1229.608255] env[62627]: DEBUG nova.objects.instance [None req-1aaba5e8-a913-4389-8d36-93d888907507 tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Lazy-loading 'resources' on Instance uuid c25fe185-2d6a-40a8-be06-84ee22a5464e {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1229.923882] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Acquiring lock "refresh_cache-6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 
1229.923882] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Acquired lock "refresh_cache-6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1229.923882] env[62627]: DEBUG nova.network.neutron [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] Forcefully refreshing network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1229.941150] env[62627]: DEBUG oslo_vmware.api [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]522ae3f8-099c-1f48-2d81-9a184d6a5a39, 'name': SearchDatastore_Task, 'duration_secs': 0.01727} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1229.941477] env[62627]: DEBUG oslo_concurrency.lockutils [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1229.941730] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] [instance: 4a4b57c8-251c-4ca8-b662-b96214110979] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1229.941976] env[62627]: DEBUG oslo_concurrency.lockutils [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1229.942182] env[62627]: DEBUG oslo_concurrency.lockutils [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1229.942331] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1229.942606] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ccdbd755-8eec-48b9-8eac-b865319ea237 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
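The Acquiring/Acquired/Releasing lock entries in the trace above come from oslo.concurrency's lockutils helpers, which Nova uses both as the synchronized decorator (the entries pointing at inner ... lockutils.py:402/407/421) and as the lock() context manager (the entries pointing at lock ... lockutils.py:310/313/331). A minimal sketch of those two patterns, with hypothetical lock names and bodies rather than anything taken from this run, could look like:

from oslo_concurrency import lockutils

# Decorator form: the wrapper is literally named "inner", which is why the
# "Acquiring lock ... by ..." / "acquired ... waited" / '"released" ... held'
# DEBUG lines above cite inner in lockutils.py. Lock name here is hypothetical.
@lockutils.synchronized('compute_resources')
def claim_resources():
    pass  # resource-tracker style work done while holding the named lock

# Context-manager form: emits the plain "Acquiring lock" / "Acquired lock" /
# "Releasing lock" lines, as seen around the datastore image-cache path above.
def process_cached_image(datastore_path):
    with lockutils.lock(datastore_path):
        pass  # e.g. run a SearchDatastore_Task while the cache entry is locked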
[ 1229.953212] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1229.953405] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1229.954170] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-09f28617-3deb-4414-bc4d-a604e0b679e5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.960954] env[62627]: DEBUG oslo_vmware.api [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Waiting for the task: (returnval){ [ 1229.960954] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52b3de8a-82d2-e485-bf96-569a08ce1817" [ 1229.960954] env[62627]: _type = "Task" [ 1229.960954] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1229.971201] env[62627]: DEBUG oslo_vmware.api [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52b3de8a-82d2-e485-bf96-569a08ce1817, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1230.066411] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1230.114603] env[62627]: DEBUG oslo_concurrency.lockutils [req-b8239902-b069-4406-a469-417cf8ef186d req-63a2407f-a6d6-465d-ad0b-a5770befeeee service nova] Releasing lock "refresh_cache-4a4b57c8-251c-4ca8-b662-b96214110979" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1230.186041] env[62627]: DEBUG nova.network.neutron [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] [instance: f2eefd04-2e4d-48b2-9895-ae58070db970] Successfully updated port: 98d518c4-51bc-4451-a4d2-6bd21f656d3d {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1230.205200] env[62627]: INFO nova.scheduler.client.report [None req-ccc29339-3d27-4102-bee3-6e838047e319 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Deleted allocation for migration 1c5f3704-de8e-4996-98a8-086c50a3c0d7 [ 1230.360257] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4b42cd7-b4e7-474d-b3ff-8c36473a7ed5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.372845] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a323384e-5960-4af0-950b-1e437b4c6ca7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.411281] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f13580e6-0702-4564-84e7-947fcac6aba8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.426036] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c535175-57aa-42a0-87ea-346e270b5e21 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.447251] env[62627]: DEBUG nova.compute.provider_tree [None req-1aaba5e8-a913-4389-8d36-93d888907507 tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1230.472457] env[62627]: DEBUG oslo_vmware.api [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52b3de8a-82d2-e485-bf96-569a08ce1817, 'name': SearchDatastore_Task, 'duration_secs': 0.010319} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1230.473413] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c443b839-82ef-4e5e-9b34-9eeb92a0cc5d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.481064] env[62627]: DEBUG oslo_vmware.api [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Waiting for the task: (returnval){ [ 1230.481064] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]521c7d51-cee4-b341-0ddf-d454e15b95e3" [ 1230.481064] env[62627]: _type = "Task" [ 1230.481064] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1230.494215] env[62627]: DEBUG oslo_vmware.api [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]521c7d51-cee4-b341-0ddf-d454e15b95e3, 'name': SearchDatastore_Task, 'duration_secs': 0.010125} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1230.494705] env[62627]: DEBUG oslo_concurrency.lockutils [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1230.495132] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 4a4b57c8-251c-4ca8-b662-b96214110979/4a4b57c8-251c-4ca8-b662-b96214110979.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1230.495617] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c1e04009-a23a-4982-84f2-ec6e86c7e157 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.504167] env[62627]: DEBUG oslo_vmware.api [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Waiting for the task: (returnval){ [ 1230.504167] env[62627]: value = "task-2195239" [ 1230.504167] env[62627]: _type = "Task" [ 1230.504167] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1230.513025] env[62627]: DEBUG oslo_vmware.api [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Task: {'id': task-2195239, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1230.689905] env[62627]: DEBUG oslo_concurrency.lockutils [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Acquiring lock "refresh_cache-f2eefd04-2e4d-48b2-9895-ae58070db970" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1230.690062] env[62627]: DEBUG oslo_concurrency.lockutils [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Acquired lock "refresh_cache-f2eefd04-2e4d-48b2-9895-ae58070db970" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1230.690263] env[62627]: DEBUG nova.network.neutron [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] [instance: f2eefd04-2e4d-48b2-9895-ae58070db970] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1230.716809] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ccc29339-3d27-4102-bee3-6e838047e319 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "36a8d4d3-d6b6-4b42-9514-0e29ee45aea9" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 9.652s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1230.735698] env[62627]: INFO nova.compute.manager [None req-05827afa-bd59-4b42-816f-925809ecbdb8 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Resuming [ 1230.736453] env[62627]: DEBUG nova.objects.instance [None req-05827afa-bd59-4b42-816f-925809ecbdb8 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Lazy-loading 'flavor' on Instance uuid b282093d-8013-436d-94d2-bc26161456ea {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1230.851652] env[62627]: DEBUG nova.compute.manager [req-41b22161-a588-4537-bda8-30c286874b71 req-a3bb194b-8e07-48b0-be67-dfa4929cb3d5 service nova] [instance: f2eefd04-2e4d-48b2-9895-ae58070db970] Received event network-vif-plugged-98d518c4-51bc-4451-a4d2-6bd21f656d3d {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1230.851873] env[62627]: DEBUG oslo_concurrency.lockutils [req-41b22161-a588-4537-bda8-30c286874b71 req-a3bb194b-8e07-48b0-be67-dfa4929cb3d5 service nova] Acquiring lock "f2eefd04-2e4d-48b2-9895-ae58070db970-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1230.852459] env[62627]: DEBUG oslo_concurrency.lockutils 
[req-41b22161-a588-4537-bda8-30c286874b71 req-a3bb194b-8e07-48b0-be67-dfa4929cb3d5 service nova] Lock "f2eefd04-2e4d-48b2-9895-ae58070db970-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1230.852651] env[62627]: DEBUG oslo_concurrency.lockutils [req-41b22161-a588-4537-bda8-30c286874b71 req-a3bb194b-8e07-48b0-be67-dfa4929cb3d5 service nova] Lock "f2eefd04-2e4d-48b2-9895-ae58070db970-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1230.852808] env[62627]: DEBUG nova.compute.manager [req-41b22161-a588-4537-bda8-30c286874b71 req-a3bb194b-8e07-48b0-be67-dfa4929cb3d5 service nova] [instance: f2eefd04-2e4d-48b2-9895-ae58070db970] No waiting events found dispatching network-vif-plugged-98d518c4-51bc-4451-a4d2-6bd21f656d3d {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1230.852981] env[62627]: WARNING nova.compute.manager [req-41b22161-a588-4537-bda8-30c286874b71 req-a3bb194b-8e07-48b0-be67-dfa4929cb3d5 service nova] [instance: f2eefd04-2e4d-48b2-9895-ae58070db970] Received unexpected event network-vif-plugged-98d518c4-51bc-4451-a4d2-6bd21f656d3d for instance with vm_state building and task_state spawning. [ 1230.854808] env[62627]: DEBUG nova.compute.manager [req-41b22161-a588-4537-bda8-30c286874b71 req-a3bb194b-8e07-48b0-be67-dfa4929cb3d5 service nova] [instance: f2eefd04-2e4d-48b2-9895-ae58070db970] Received event network-changed-98d518c4-51bc-4451-a4d2-6bd21f656d3d {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1230.854887] env[62627]: DEBUG nova.compute.manager [req-41b22161-a588-4537-bda8-30c286874b71 req-a3bb194b-8e07-48b0-be67-dfa4929cb3d5 service nova] [instance: f2eefd04-2e4d-48b2-9895-ae58070db970] Refreshing instance network info cache due to event network-changed-98d518c4-51bc-4451-a4d2-6bd21f656d3d. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1230.855219] env[62627]: DEBUG oslo_concurrency.lockutils [req-41b22161-a588-4537-bda8-30c286874b71 req-a3bb194b-8e07-48b0-be67-dfa4929cb3d5 service nova] Acquiring lock "refresh_cache-f2eefd04-2e4d-48b2-9895-ae58070db970" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1230.925314] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0a59b3e6-07f2-4be8-936f-ca25e2fd0c04 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquiring lock "36a8d4d3-d6b6-4b42-9514-0e29ee45aea9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1230.925778] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0a59b3e6-07f2-4be8-936f-ca25e2fd0c04 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "36a8d4d3-d6b6-4b42-9514-0e29ee45aea9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1230.925979] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0a59b3e6-07f2-4be8-936f-ca25e2fd0c04 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquiring lock "36a8d4d3-d6b6-4b42-9514-0e29ee45aea9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1230.926587] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0a59b3e6-07f2-4be8-936f-ca25e2fd0c04 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "36a8d4d3-d6b6-4b42-9514-0e29ee45aea9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1230.926587] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0a59b3e6-07f2-4be8-936f-ca25e2fd0c04 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "36a8d4d3-d6b6-4b42-9514-0e29ee45aea9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1230.929019] env[62627]: INFO nova.compute.manager [None req-0a59b3e6-07f2-4be8-936f-ca25e2fd0c04 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Terminating instance [ 1230.951405] env[62627]: DEBUG nova.scheduler.client.report [None req-1aaba5e8-a913-4389-8d36-93d888907507 tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1231.014219] env[62627]: DEBUG oslo_vmware.api [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Task: {'id': task-2195239, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.475738} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1231.014484] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 4a4b57c8-251c-4ca8-b662-b96214110979/4a4b57c8-251c-4ca8-b662-b96214110979.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1231.014694] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] [instance: 4a4b57c8-251c-4ca8-b662-b96214110979] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1231.014950] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e8f0fc80-e167-4a8c-adc8-73b8e2876631 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.021733] env[62627]: DEBUG oslo_vmware.api [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Waiting for the task: (returnval){ [ 1231.021733] env[62627]: value = "task-2195240" [ 1231.021733] env[62627]: _type = "Task" [ 1231.021733] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1231.030735] env[62627]: DEBUG oslo_vmware.api [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Task: {'id': task-2195240, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.205865] env[62627]: DEBUG nova.network.neutron [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] Updating instance_info_cache with network_info: [{"id": "b0e730ac-28a5-40f0-bbe1-c4f48f060624", "address": "fa:16:3e:66:a3:d6", "network": {"id": "1e37275e-31eb-4a79-8ff1-1b105d16fe3d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-59116622-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.147", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7a97f57bdf4a46db80460b1d5c9f9b9f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f499bc9-78da-46c1-9274-19edf26d31cb", "external-id": "nsx-vlan-transportzone-243", "segmentation_id": 243, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0e730ac-28", "ovs_interfaceid": "b0e730ac-28a5-40f0-bbe1-c4f48f060624", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1231.225061] env[62627]: DEBUG nova.network.neutron [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] [instance: f2eefd04-2e4d-48b2-9895-ae58070db970] Instance cache missing network info. 
{{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1231.360931] env[62627]: DEBUG nova.network.neutron [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] [instance: f2eefd04-2e4d-48b2-9895-ae58070db970] Updating instance_info_cache with network_info: [{"id": "98d518c4-51bc-4451-a4d2-6bd21f656d3d", "address": "fa:16:3e:8f:d5:b2", "network": {"id": "06831bec-3f6a-4856-8568-a06ea6819606", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1749970615-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "625cd7e476754ce2b16cd6513f733e8a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fc48e29b-113c-4849-850c-35435eab4052", "external-id": "nsx-vlan-transportzone-958", "segmentation_id": 958, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98d518c4-51", "ovs_interfaceid": "98d518c4-51bc-4451-a4d2-6bd21f656d3d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1231.433384] env[62627]: DEBUG nova.compute.manager [None req-0a59b3e6-07f2-4be8-936f-ca25e2fd0c04 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Start destroying the instance on the hypervisor. 
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1231.433627] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-0a59b3e6-07f2-4be8-936f-ca25e2fd0c04 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1231.435034] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fec29a2-ec5c-4e83-ad4d-be00980008e9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.444811] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a59b3e6-07f2-4be8-936f-ca25e2fd0c04 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1231.444811] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d0410350-ede4-41e4-ab69-777353f1e2ad {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.450750] env[62627]: DEBUG oslo_vmware.api [None req-0a59b3e6-07f2-4be8-936f-ca25e2fd0c04 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1231.450750] env[62627]: value = "task-2195241" [ 1231.450750] env[62627]: _type = "Task" [ 1231.450750] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1231.460616] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1aaba5e8-a913-4389-8d36-93d888907507 tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.853s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1231.463312] env[62627]: DEBUG oslo_vmware.api [None req-0a59b3e6-07f2-4be8-936f-ca25e2fd0c04 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195241, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.464210] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.398s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1231.465454] env[62627]: INFO nova.compute.claims [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1231.515449] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquiring lock "f195662c-9493-469a-ba36-d712f9944b28" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1231.515690] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "f195662c-9493-469a-ba36-d712f9944b28" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1231.533801] env[62627]: DEBUG oslo_vmware.api [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Task: {'id': task-2195240, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064341} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1231.534094] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] [instance: 4a4b57c8-251c-4ca8-b662-b96214110979] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1231.534870] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-994f6c7c-9d84-4b7a-b2a5-7cdba152e2ef {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.560058] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] [instance: 4a4b57c8-251c-4ca8-b662-b96214110979] Reconfiguring VM instance instance-00000065 to attach disk [datastore2] 4a4b57c8-251c-4ca8-b662-b96214110979/4a4b57c8-251c-4ca8-b662-b96214110979.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1231.560058] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b3a065ca-91fb-4368-95f5-edf74afe6c00 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.580928] env[62627]: DEBUG oslo_vmware.api [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Waiting for the task: (returnval){ [ 1231.580928] env[62627]: value = "task-2195242" [ 1231.580928] env[62627]: _type = "Task" [ 1231.580928] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1231.589454] env[62627]: DEBUG oslo_vmware.api [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Task: {'id': task-2195242, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.708188] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Releasing lock "refresh_cache-6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1231.708417] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] Updated the network info_cache for instance {{(pid=62627) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10516}} [ 1231.708634] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1231.708819] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1231.708975] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1231.709189] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1231.709338] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1231.709487] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1231.709616] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62627) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 1231.709760] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1231.863215] env[62627]: DEBUG oslo_concurrency.lockutils [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Releasing lock "refresh_cache-f2eefd04-2e4d-48b2-9895-ae58070db970" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1231.863644] env[62627]: DEBUG nova.compute.manager [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] [instance: f2eefd04-2e4d-48b2-9895-ae58070db970] Instance network_info: |[{"id": "98d518c4-51bc-4451-a4d2-6bd21f656d3d", "address": "fa:16:3e:8f:d5:b2", "network": {"id": "06831bec-3f6a-4856-8568-a06ea6819606", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1749970615-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "625cd7e476754ce2b16cd6513f733e8a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fc48e29b-113c-4849-850c-35435eab4052", "external-id": "nsx-vlan-transportzone-958", "segmentation_id": 958, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98d518c4-51", "ovs_interfaceid": "98d518c4-51bc-4451-a4d2-6bd21f656d3d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1231.863989] env[62627]: DEBUG oslo_concurrency.lockutils [req-41b22161-a588-4537-bda8-30c286874b71 req-a3bb194b-8e07-48b0-be67-dfa4929cb3d5 service nova] Acquired lock "refresh_cache-f2eefd04-2e4d-48b2-9895-ae58070db970" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1231.864227] env[62627]: DEBUG nova.network.neutron [req-41b22161-a588-4537-bda8-30c286874b71 req-a3bb194b-8e07-48b0-be67-dfa4929cb3d5 service nova] [instance: f2eefd04-2e4d-48b2-9895-ae58070db970] Refreshing network info cache for port 98d518c4-51bc-4451-a4d2-6bd21f656d3d {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1231.865456] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] [instance: f2eefd04-2e4d-48b2-9895-ae58070db970] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8f:d5:b2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fc48e29b-113c-4849-850c-35435eab4052', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '98d518c4-51bc-4451-a4d2-6bd21f656d3d', 'vif_model': 
'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1231.872781] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Creating folder: Project (625cd7e476754ce2b16cd6513f733e8a). Parent ref: group-v447541. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1231.876107] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8dd993df-2c4b-47a5-bd31-67606c483d9f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.890872] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Created folder: Project (625cd7e476754ce2b16cd6513f733e8a) in parent group-v447541. [ 1231.890872] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Creating folder: Instances. Parent ref: group-v447818. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1231.890872] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-204b50dd-913e-4e66-8107-a4a22eff0ba9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.899906] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Created folder: Instances in parent group-v447818. [ 1231.900167] env[62627]: DEBUG oslo.service.loopingcall [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1231.900500] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f2eefd04-2e4d-48b2-9895-ae58070db970] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1231.900611] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b93608d4-11dc-4619-98d0-404634e2cbe6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.923409] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1231.923409] env[62627]: value = "task-2195245" [ 1231.923409] env[62627]: _type = "Task" [ 1231.923409] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1231.931789] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195245, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.962089] env[62627]: DEBUG oslo_vmware.api [None req-0a59b3e6-07f2-4be8-936f-ca25e2fd0c04 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195241, 'name': PowerOffVM_Task, 'duration_secs': 0.256603} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1231.965823] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a59b3e6-07f2-4be8-936f-ca25e2fd0c04 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1231.966009] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-0a59b3e6-07f2-4be8-936f-ca25e2fd0c04 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1231.966294] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0315d3ce-22d9-4d4e-9bf0-9fe7514bb819 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.981291] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1aaba5e8-a913-4389-8d36-93d888907507 tempest-ServerActionsV293TestJSON-856706501 tempest-ServerActionsV293TestJSON-856706501-project-member] Lock "c25fe185-2d6a-40a8-be06-84ee22a5464e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.396s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1232.017578] env[62627]: DEBUG nova.compute.manager [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: f195662c-9493-469a-ba36-d712f9944b28] Starting instance... 
{{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1232.036460] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-0a59b3e6-07f2-4be8-936f-ca25e2fd0c04 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1232.036631] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-0a59b3e6-07f2-4be8-936f-ca25e2fd0c04 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1232.036779] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a59b3e6-07f2-4be8-936f-ca25e2fd0c04 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Deleting the datastore file [datastore2] 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1232.037038] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-01dc9074-53f9-4e00-9ee3-7de3a37a063e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.047266] env[62627]: DEBUG oslo_vmware.api [None req-0a59b3e6-07f2-4be8-936f-ca25e2fd0c04 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1232.047266] env[62627]: value = "task-2195247" [ 1232.047266] env[62627]: _type = "Task" [ 1232.047266] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1232.056019] env[62627]: DEBUG oslo_vmware.api [None req-0a59b3e6-07f2-4be8-936f-ca25e2fd0c04 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195247, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.091057] env[62627]: DEBUG oslo_vmware.api [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Task: {'id': task-2195242, 'name': ReconfigVM_Task, 'duration_secs': 0.276847} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1232.091299] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] [instance: 4a4b57c8-251c-4ca8-b662-b96214110979] Reconfigured VM instance instance-00000065 to attach disk [datastore2] 4a4b57c8-251c-4ca8-b662-b96214110979/4a4b57c8-251c-4ca8-b662-b96214110979.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1232.092032] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-caf03022-8fa3-439c-9486-cadeffa29135 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.101836] env[62627]: DEBUG oslo_vmware.api [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Waiting for the task: (returnval){ [ 1232.101836] env[62627]: value = "task-2195248" [ 1232.101836] env[62627]: _type = "Task" [ 1232.101836] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1232.111996] env[62627]: DEBUG oslo_vmware.api [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Task: {'id': task-2195248, 'name': Rename_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.132148] env[62627]: DEBUG nova.network.neutron [req-41b22161-a588-4537-bda8-30c286874b71 req-a3bb194b-8e07-48b0-be67-dfa4929cb3d5 service nova] [instance: f2eefd04-2e4d-48b2-9895-ae58070db970] Updated VIF entry in instance network info cache for port 98d518c4-51bc-4451-a4d2-6bd21f656d3d. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1232.132532] env[62627]: DEBUG nova.network.neutron [req-41b22161-a588-4537-bda8-30c286874b71 req-a3bb194b-8e07-48b0-be67-dfa4929cb3d5 service nova] [instance: f2eefd04-2e4d-48b2-9895-ae58070db970] Updating instance_info_cache with network_info: [{"id": "98d518c4-51bc-4451-a4d2-6bd21f656d3d", "address": "fa:16:3e:8f:d5:b2", "network": {"id": "06831bec-3f6a-4856-8568-a06ea6819606", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1749970615-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "625cd7e476754ce2b16cd6513f733e8a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fc48e29b-113c-4849-850c-35435eab4052", "external-id": "nsx-vlan-transportzone-958", "segmentation_id": 958, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98d518c4-51", "ovs_interfaceid": "98d518c4-51bc-4451-a4d2-6bd21f656d3d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1232.212594] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1232.249941] env[62627]: DEBUG oslo_concurrency.lockutils [None req-05827afa-bd59-4b42-816f-925809ecbdb8 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Acquiring lock "refresh_cache-b282093d-8013-436d-94d2-bc26161456ea" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1232.249941] env[62627]: DEBUG oslo_concurrency.lockutils [None req-05827afa-bd59-4b42-816f-925809ecbdb8 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Acquired lock "refresh_cache-b282093d-8013-436d-94d2-bc26161456ea" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1232.249941] env[62627]: DEBUG nova.network.neutron [None req-05827afa-bd59-4b42-816f-925809ecbdb8 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1232.434049] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195245, 'name': CreateVM_Task, 'duration_secs': 0.353656} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1232.434049] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f2eefd04-2e4d-48b2-9895-ae58070db970] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1232.434762] env[62627]: DEBUG oslo_concurrency.lockutils [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1232.434928] env[62627]: DEBUG oslo_concurrency.lockutils [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1232.435259] env[62627]: DEBUG oslo_concurrency.lockutils [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1232.435507] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7690eb44-4001-4647-93d5-9fd8af0dd083 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.441069] env[62627]: DEBUG oslo_vmware.api [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Waiting for the task: (returnval){ [ 1232.441069] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52b3d8a5-0161-fea3-f8d0-01cdfe80c84e" [ 1232.441069] env[62627]: _type = "Task" [ 1232.441069] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1232.449935] env[62627]: DEBUG oslo_vmware.api [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52b3d8a5-0161-fea3-f8d0-01cdfe80c84e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.537263] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1232.557539] env[62627]: DEBUG oslo_vmware.api [None req-0a59b3e6-07f2-4be8-936f-ca25e2fd0c04 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195247, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.163792} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1232.560062] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a59b3e6-07f2-4be8-936f-ca25e2fd0c04 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1232.560256] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-0a59b3e6-07f2-4be8-936f-ca25e2fd0c04 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1232.560435] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-0a59b3e6-07f2-4be8-936f-ca25e2fd0c04 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1232.560607] env[62627]: INFO nova.compute.manager [None req-0a59b3e6-07f2-4be8-936f-ca25e2fd0c04 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1232.560852] env[62627]: DEBUG oslo.service.loopingcall [None req-0a59b3e6-07f2-4be8-936f-ca25e2fd0c04 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1232.561248] env[62627]: DEBUG nova.compute.manager [-] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1232.561347] env[62627]: DEBUG nova.network.neutron [-] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1232.613080] env[62627]: DEBUG oslo_vmware.api [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Task: {'id': task-2195248, 'name': Rename_Task, 'duration_secs': 0.19445} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1232.613348] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] [instance: 4a4b57c8-251c-4ca8-b662-b96214110979] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1232.613593] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cf63d68f-e872-4aef-92a6-72ade3f2d878 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.621077] env[62627]: DEBUG oslo_vmware.api [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Waiting for the task: (returnval){ [ 1232.621077] env[62627]: value = "task-2195249" [ 1232.621077] env[62627]: _type = "Task" [ 1232.621077] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1232.635597] env[62627]: DEBUG oslo_concurrency.lockutils [req-41b22161-a588-4537-bda8-30c286874b71 req-a3bb194b-8e07-48b0-be67-dfa4929cb3d5 service nova] Releasing lock "refresh_cache-f2eefd04-2e4d-48b2-9895-ae58070db970" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1232.639013] env[62627]: DEBUG oslo_vmware.api [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Task: {'id': task-2195249, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.687230] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2e2f0b1-4961-4f9d-a7b1-bc484e049ddd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.698729] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4c7dff6-9e12-4100-ad17-d507661b131c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.752114] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5fad0098-843f-499a-a490-4d957e83cb48 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquiring lock "interface-c7e3246d-ffe7-4d81-a2ec-c5878649ed6b-1034439a-d24f-4eb6-8139-67b012254146" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1232.752431] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5fad0098-843f-499a-a490-4d957e83cb48 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lock "interface-c7e3246d-ffe7-4d81-a2ec-c5878649ed6b-1034439a-d24f-4eb6-8139-67b012254146" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1232.752791] env[62627]: DEBUG nova.objects.instance [None req-5fad0098-843f-499a-a490-4d957e83cb48 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lazy-loading 'flavor' on Instance uuid c7e3246d-ffe7-4d81-a2ec-c5878649ed6b {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1232.761464] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b40163f-cc2b-4cd2-bb7a-dd7848f1372e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.767837] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acda5415-5920-4935-a2d5-7359a72dac8c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.783557] env[62627]: DEBUG nova.compute.provider_tree [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Updating inventory in ProviderTree for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1232.932955] env[62627]: DEBUG nova.compute.manager [req-f4363ff9-fb1f-43c5-aca1-6089ba1fa31e req-41fa9473-ce26-455e-8894-b9f01461b237 service nova] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Received event 
network-vif-deleted-d885a4e0-fbdd-41cf-9e49-1e16cd392ecf {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1232.933214] env[62627]: INFO nova.compute.manager [req-f4363ff9-fb1f-43c5-aca1-6089ba1fa31e req-41fa9473-ce26-455e-8894-b9f01461b237 service nova] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Neutron deleted interface d885a4e0-fbdd-41cf-9e49-1e16cd392ecf; detaching it from the instance and deleting it from the info cache [ 1232.933423] env[62627]: DEBUG nova.network.neutron [req-f4363ff9-fb1f-43c5-aca1-6089ba1fa31e req-41fa9473-ce26-455e-8894-b9f01461b237 service nova] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1232.956947] env[62627]: DEBUG oslo_vmware.api [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52b3d8a5-0161-fea3-f8d0-01cdfe80c84e, 'name': SearchDatastore_Task, 'duration_secs': 0.010026} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1232.957254] env[62627]: DEBUG oslo_concurrency.lockutils [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1232.957482] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] [instance: f2eefd04-2e4d-48b2-9895-ae58070db970] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1232.957711] env[62627]: DEBUG oslo_concurrency.lockutils [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1232.957858] env[62627]: DEBUG oslo_concurrency.lockutils [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1232.958230] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1232.958512] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-95b457d3-22ae-4a90-b40c-b87e63865dd9 {{(pid=62627) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.968372] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1232.968553] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1232.969406] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b385c022-083f-4bf4-9497-7ef872b5ee88 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.975186] env[62627]: DEBUG oslo_vmware.api [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Waiting for the task: (returnval){ [ 1232.975186] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5258933e-83b4-448b-3f94-b36ea38f5d53" [ 1232.975186] env[62627]: _type = "Task" [ 1232.975186] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1232.983786] env[62627]: DEBUG oslo_vmware.api [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5258933e-83b4-448b-3f94-b36ea38f5d53, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.992453] env[62627]: DEBUG nova.network.neutron [None req-05827afa-bd59-4b42-816f-925809ecbdb8 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Updating instance_info_cache with network_info: [{"id": "823150e9-f3ee-44ab-a36b-c8abdca8b70e", "address": "fa:16:3e:85:08:41", "network": {"id": "280614ef-fbde-41d9-927f-7471cc50aa14", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-970093695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97d5055e1a674886a161a96b0e095451", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8233b50c-be21-441a-a357-a29138a95b8b", "external-id": "nsx-vlan-transportzone-496", "segmentation_id": 496, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap823150e9-f3", "ovs_interfaceid": "823150e9-f3ee-44ab-a36b-c8abdca8b70e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1233.131153] env[62627]: DEBUG oslo_vmware.api [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Task: {'id': task-2195249, 'name': PowerOnVM_Task, 'duration_secs': 0.465342} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1233.131422] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] [instance: 4a4b57c8-251c-4ca8-b662-b96214110979] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1233.131621] env[62627]: INFO nova.compute.manager [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] [instance: 4a4b57c8-251c-4ca8-b662-b96214110979] Took 7.04 seconds to spawn the instance on the hypervisor. 
[ 1233.131798] env[62627]: DEBUG nova.compute.manager [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] [instance: 4a4b57c8-251c-4ca8-b662-b96214110979] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1233.132870] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-178a956f-c869-4872-aef4-f71be9a905df {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.296887] env[62627]: DEBUG nova.network.neutron [-] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1233.311559] env[62627]: ERROR nova.scheduler.client.report [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [req-be865e8a-1f63-4f7c-baca-f93d84fcca15] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c17e7f32-68d8-4e2f-8979-15d701de02f7. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-be865e8a-1f63-4f7c-baca-f93d84fcca15"}]} [ 1233.327398] env[62627]: DEBUG nova.scheduler.client.report [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Refreshing inventories for resource provider c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1233.341120] env[62627]: DEBUG nova.scheduler.client.report [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Updating ProviderTree inventory for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1233.341343] env[62627]: DEBUG nova.compute.provider_tree [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Updating inventory in ProviderTree for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1233.351353] env[62627]: DEBUG nova.scheduler.client.report [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Refreshing aggregate associations for resource provider c17e7f32-68d8-4e2f-8979-15d701de02f7, aggregates: None {{(pid=62627) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1233.363304] env[62627]: DEBUG nova.objects.instance [None req-5fad0098-843f-499a-a490-4d957e83cb48 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lazy-loading 'pci_requests' on Instance uuid c7e3246d-ffe7-4d81-a2ec-c5878649ed6b {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1233.369027] env[62627]: DEBUG nova.scheduler.client.report [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Refreshing trait associations for resource provider c17e7f32-68d8-4e2f-8979-15d701de02f7, traits: COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62627) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1233.436355] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-75b147fa-0e53-4c53-92db-c8591d327a0f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.448467] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d94ce5ef-ee1e-4b52-a65f-f969e979e970 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.480385] env[62627]: DEBUG nova.compute.manager [req-f4363ff9-fb1f-43c5-aca1-6089ba1fa31e req-41fa9473-ce26-455e-8894-b9f01461b237 service nova] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Detach interface failed, port_id=d885a4e0-fbdd-41cf-9e49-1e16cd392ecf, reason: Instance 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9 could not be found. {{(pid=62627) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1233.491573] env[62627]: DEBUG oslo_vmware.api [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5258933e-83b4-448b-3f94-b36ea38f5d53, 'name': SearchDatastore_Task, 'duration_secs': 0.040279} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1233.492375] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-67d19d18-1521-4158-add7-b5f89ae5f32c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.497164] env[62627]: DEBUG oslo_concurrency.lockutils [None req-05827afa-bd59-4b42-816f-925809ecbdb8 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Releasing lock "refresh_cache-b282093d-8013-436d-94d2-bc26161456ea" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1233.498173] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5b3eda9-8c17-4055-8574-2cba47eaa7ce {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.505161] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-05827afa-bd59-4b42-816f-925809ecbdb8 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Resuming the VM {{(pid=62627) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1233.505602] env[62627]: DEBUG oslo_vmware.api [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Waiting for the task: (returnval){ [ 1233.505602] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]528365db-a1ae-a797-be34-cd2c4329ba7b" [ 1233.505602] env[62627]: _type = "Task" [ 1233.505602] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1233.506037] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-891dc98e-6355-41ca-82a6-637f9750506d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.519343] env[62627]: DEBUG oslo_vmware.api [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]528365db-a1ae-a797-be34-cd2c4329ba7b, 'name': SearchDatastore_Task, 'duration_secs': 0.009607} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1233.520422] env[62627]: DEBUG oslo_concurrency.lockutils [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1233.520681] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] f2eefd04-2e4d-48b2-9895-ae58070db970/f2eefd04-2e4d-48b2-9895-ae58070db970.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1233.521323] env[62627]: DEBUG oslo_vmware.api [None req-05827afa-bd59-4b42-816f-925809ecbdb8 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Waiting for the task: (returnval){ [ 1233.521323] env[62627]: value = "task-2195250" [ 1233.521323] env[62627]: _type = "Task" [ 1233.521323] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1233.521323] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-82750cea-dbdf-42a2-97fa-fc1a8cd3dafd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.533479] env[62627]: DEBUG oslo_vmware.api [None req-05827afa-bd59-4b42-816f-925809ecbdb8 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195250, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1233.534711] env[62627]: DEBUG oslo_vmware.api [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Waiting for the task: (returnval){ [ 1233.534711] env[62627]: value = "task-2195251" [ 1233.534711] env[62627]: _type = "Task" [ 1233.534711] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1233.545581] env[62627]: DEBUG oslo_vmware.api [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Task: {'id': task-2195251, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1233.564598] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3e327be-9c9d-40a9-b773-6af567d27108 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.573326] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7b3da74-3140-4032-a594-01b1303bcc0e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.605226] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ea68e3d-8e26-429e-b119-27dfb95ea69e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.613881] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83d0e7f6-b122-4af4-8420-f24f4c856981 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.629253] env[62627]: DEBUG nova.compute.provider_tree [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Updating inventory in ProviderTree for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1233.651739] env[62627]: INFO nova.compute.manager [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] [instance: 4a4b57c8-251c-4ca8-b662-b96214110979] Took 12.93 seconds to build instance. [ 1233.801485] env[62627]: INFO nova.compute.manager [-] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Took 1.24 seconds to deallocate network for instance. 
[ 1233.866026] env[62627]: DEBUG nova.objects.base [None req-5fad0098-843f-499a-a490-4d957e83cb48 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=62627) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1233.866297] env[62627]: DEBUG nova.network.neutron [None req-5fad0098-843f-499a-a490-4d957e83cb48 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1233.943652] env[62627]: DEBUG nova.policy [None req-5fad0098-843f-499a-a490-4d957e83cb48 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3c5e5eee141e496db2d2ea316c43e6be', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8e4f7ad3ebcf4c4d97bdbce58eec5a09', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 1234.035770] env[62627]: DEBUG oslo_vmware.api [None req-05827afa-bd59-4b42-816f-925809ecbdb8 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195250, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1234.046351] env[62627]: DEBUG oslo_vmware.api [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Task: {'id': task-2195251, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.505913} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1234.046664] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] f2eefd04-2e4d-48b2-9895-ae58070db970/f2eefd04-2e4d-48b2-9895-ae58070db970.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1234.046897] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] [instance: f2eefd04-2e4d-48b2-9895-ae58070db970] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1234.047221] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-415aaaa2-875d-4788-ab77-9c01b0a5d10e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.057784] env[62627]: DEBUG oslo_vmware.api [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Waiting for the task: (returnval){ [ 1234.057784] env[62627]: value = "task-2195252" [ 1234.057784] env[62627]: _type = "Task" [ 1234.057784] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1234.068779] env[62627]: DEBUG oslo_vmware.api [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Task: {'id': task-2195252, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1234.158025] env[62627]: DEBUG oslo_concurrency.lockutils [None req-33f0529e-10e3-4ee4-a217-4d92cb06fd7c tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Lock "4a4b57c8-251c-4ca8-b662-b96214110979" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.439s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1234.171827] env[62627]: DEBUG nova.scheduler.client.report [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Updated inventory for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with generation 142 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1234.172178] env[62627]: DEBUG nova.compute.provider_tree [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Updating resource provider c17e7f32-68d8-4e2f-8979-15d701de02f7 generation from 142 to 143 during operation: update_inventory {{(pid=62627) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1234.172356] env[62627]: DEBUG nova.compute.provider_tree [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Updating inventory in ProviderTree for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1234.309815] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0a59b3e6-07f2-4be8-936f-ca25e2fd0c04 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1234.534825] env[62627]: DEBUG oslo_vmware.api [None req-05827afa-bd59-4b42-816f-925809ecbdb8 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195250, 'name': PowerOnVM_Task, 'duration_secs': 0.786515} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1234.535155] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-05827afa-bd59-4b42-816f-925809ecbdb8 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Resumed the VM {{(pid=62627) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1234.535357] env[62627]: DEBUG nova.compute.manager [None req-05827afa-bd59-4b42-816f-925809ecbdb8 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1234.536187] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-063fdfd7-82a0-4691-8ba0-3fdcc9f1b820 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.567881] env[62627]: DEBUG oslo_vmware.api [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Task: {'id': task-2195252, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.095027} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1234.568379] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] [instance: f2eefd04-2e4d-48b2-9895-ae58070db970] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1234.569503] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6476726-6aa4-4943-aa1a-46e6bc7470b8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.592683] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] [instance: f2eefd04-2e4d-48b2-9895-ae58070db970] Reconfiguring VM instance instance-00000066 to attach disk [datastore2] f2eefd04-2e4d-48b2-9895-ae58070db970/f2eefd04-2e4d-48b2-9895-ae58070db970.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1234.593641] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f054e86b-7fd9-4088-834b-8f5b9a6b8c52 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.613987] env[62627]: DEBUG oslo_vmware.api [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Waiting for the task: (returnval){ [ 1234.613987] env[62627]: value = "task-2195253" [ 1234.613987] env[62627]: _type = "Task" [ 1234.613987] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1234.622328] env[62627]: DEBUG oslo_vmware.api [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Task: {'id': task-2195253, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1234.681552] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.217s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1234.681552] env[62627]: DEBUG nova.compute.manager [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1234.684245] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 2.472s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1234.684419] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1234.684572] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62627) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1234.684856] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.148s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1234.686224] env[62627]: INFO nova.compute.claims [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: f195662c-9493-469a-ba36-d712f9944b28] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1234.689460] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe6f6081-559d-4ab4-a0e9-bc919840fe2c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.697927] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ba3c809-5032-437c-a0c6-63745ec473db {{(pid=62627) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.712747] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baff1f35-18ad-4ee4-82ff-d080537e38de {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.720786] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f6224e2-78d0-4ce4-b15d-c3b52343f38d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.754068] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180076MB free_disk=176GB free_vcpus=48 pci_devices=None {{(pid=62627) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1234.754225] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1235.125344] env[62627]: DEBUG oslo_vmware.api [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Task: {'id': task-2195253, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.190656] env[62627]: DEBUG nova.compute.utils [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1235.192165] env[62627]: DEBUG nova.compute.manager [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Allocating IP information in the background. 
{{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1235.192372] env[62627]: DEBUG nova.network.neutron [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1235.254827] env[62627]: DEBUG nova.policy [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bc7ce5392ecd486b962c3db9d43181e1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a9f3a36c557145e38c528a628b1f3b67', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 1235.360484] env[62627]: DEBUG nova.compute.manager [req-8722b590-750c-44c4-a45e-6d13e39b7472 req-9d367226-ea0e-42ec-a400-0d9d27514ae2 service nova] [instance: 4a4b57c8-251c-4ca8-b662-b96214110979] Received event network-changed-200bba8f-ca22-4dfa-9492-f4d2f2e6f212 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1235.360684] env[62627]: DEBUG nova.compute.manager [req-8722b590-750c-44c4-a45e-6d13e39b7472 req-9d367226-ea0e-42ec-a400-0d9d27514ae2 service nova] [instance: 4a4b57c8-251c-4ca8-b662-b96214110979] Refreshing instance network info cache due to event network-changed-200bba8f-ca22-4dfa-9492-f4d2f2e6f212. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1235.360895] env[62627]: DEBUG oslo_concurrency.lockutils [req-8722b590-750c-44c4-a45e-6d13e39b7472 req-9d367226-ea0e-42ec-a400-0d9d27514ae2 service nova] Acquiring lock "refresh_cache-4a4b57c8-251c-4ca8-b662-b96214110979" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1235.361050] env[62627]: DEBUG oslo_concurrency.lockutils [req-8722b590-750c-44c4-a45e-6d13e39b7472 req-9d367226-ea0e-42ec-a400-0d9d27514ae2 service nova] Acquired lock "refresh_cache-4a4b57c8-251c-4ca8-b662-b96214110979" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1235.361213] env[62627]: DEBUG nova.network.neutron [req-8722b590-750c-44c4-a45e-6d13e39b7472 req-9d367226-ea0e-42ec-a400-0d9d27514ae2 service nova] [instance: 4a4b57c8-251c-4ca8-b662-b96214110979] Refreshing network info cache for port 200bba8f-ca22-4dfa-9492-f4d2f2e6f212 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1235.627167] env[62627]: DEBUG oslo_vmware.api [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Task: {'id': task-2195253, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.686735] env[62627]: DEBUG nova.network.neutron [None req-5fad0098-843f-499a-a490-4d957e83cb48 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Successfully updated port: 1034439a-d24f-4eb6-8139-67b012254146 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1235.698534] env[62627]: DEBUG nova.compute.manager [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1235.763924] env[62627]: DEBUG nova.network.neutron [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Successfully created port: 57af03b0-8d4d-4f61-a384-800617af9be4 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1235.930116] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aca0d10c-5ab3-43db-9f34-c3ce21c2365e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.938701] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccded79e-55f3-478e-b8f7-00b47e074513 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.971829] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6853ae7-cab7-4853-9b2d-8790665dc10c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.975429] env[62627]: DEBUG nova.compute.manager [req-fb8e2a84-edf3-4582-a1f1-b08114b96864 req-fcef6344-3d9e-4412-a36d-2d2475e74d29 service nova] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Received event network-vif-plugged-1034439a-d24f-4eb6-8139-67b012254146 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1235.975636] env[62627]: DEBUG oslo_concurrency.lockutils [req-fb8e2a84-edf3-4582-a1f1-b08114b96864 req-fcef6344-3d9e-4412-a36d-2d2475e74d29 service nova] Acquiring lock "c7e3246d-ffe7-4d81-a2ec-c5878649ed6b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1235.975844] env[62627]: DEBUG oslo_concurrency.lockutils [req-fb8e2a84-edf3-4582-a1f1-b08114b96864 req-fcef6344-3d9e-4412-a36d-2d2475e74d29 service nova] Lock "c7e3246d-ffe7-4d81-a2ec-c5878649ed6b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1235.976021] env[62627]: DEBUG oslo_concurrency.lockutils [req-fb8e2a84-edf3-4582-a1f1-b08114b96864 req-fcef6344-3d9e-4412-a36d-2d2475e74d29 service nova] Lock "c7e3246d-ffe7-4d81-a2ec-c5878649ed6b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s 
{{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1235.976269] env[62627]: DEBUG nova.compute.manager [req-fb8e2a84-edf3-4582-a1f1-b08114b96864 req-fcef6344-3d9e-4412-a36d-2d2475e74d29 service nova] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] No waiting events found dispatching network-vif-plugged-1034439a-d24f-4eb6-8139-67b012254146 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1235.976349] env[62627]: WARNING nova.compute.manager [req-fb8e2a84-edf3-4582-a1f1-b08114b96864 req-fcef6344-3d9e-4412-a36d-2d2475e74d29 service nova] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Received unexpected event network-vif-plugged-1034439a-d24f-4eb6-8139-67b012254146 for instance with vm_state active and task_state None. [ 1235.983476] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-208f55f5-24bc-449e-9153-15584aa686b2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.998187] env[62627]: DEBUG nova.compute.provider_tree [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Updating inventory in ProviderTree for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1236.130161] env[62627]: DEBUG oslo_vmware.api [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Task: {'id': task-2195253, 'name': ReconfigVM_Task, 'duration_secs': 1.156504} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1236.130565] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] [instance: f2eefd04-2e4d-48b2-9895-ae58070db970] Reconfigured VM instance instance-00000066 to attach disk [datastore2] f2eefd04-2e4d-48b2-9895-ae58070db970/f2eefd04-2e4d-48b2-9895-ae58070db970.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1236.132550] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ce5b26d9-da77-4cf4-b503-65562d4737e2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.141206] env[62627]: DEBUG oslo_vmware.api [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Waiting for the task: (returnval){ [ 1236.141206] env[62627]: value = "task-2195254" [ 1236.141206] env[62627]: _type = "Task" [ 1236.141206] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.152988] env[62627]: DEBUG oslo_vmware.api [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Task: {'id': task-2195254, 'name': Rename_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.190449] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5fad0098-843f-499a-a490-4d957e83cb48 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquiring lock "refresh_cache-c7e3246d-ffe7-4d81-a2ec-c5878649ed6b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1236.193961] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5fad0098-843f-499a-a490-4d957e83cb48 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquired lock "refresh_cache-c7e3246d-ffe7-4d81-a2ec-c5878649ed6b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1236.193961] env[62627]: DEBUG nova.network.neutron [None req-5fad0098-843f-499a-a490-4d957e83cb48 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1236.265023] env[62627]: DEBUG nova.network.neutron [req-8722b590-750c-44c4-a45e-6d13e39b7472 req-9d367226-ea0e-42ec-a400-0d9d27514ae2 service nova] [instance: 4a4b57c8-251c-4ca8-b662-b96214110979] Updated VIF entry in instance network info cache for port 200bba8f-ca22-4dfa-9492-f4d2f2e6f212. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1236.265023] env[62627]: DEBUG nova.network.neutron [req-8722b590-750c-44c4-a45e-6d13e39b7472 req-9d367226-ea0e-42ec-a400-0d9d27514ae2 service nova] [instance: 4a4b57c8-251c-4ca8-b662-b96214110979] Updating instance_info_cache with network_info: [{"id": "200bba8f-ca22-4dfa-9492-f4d2f2e6f212", "address": "fa:16:3e:53:df:4c", "network": {"id": "a684ed8e-4084-4a9e-b3c8-7a17d7827291", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1686789109-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.200", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65aa68a50f0b4f44855294959300bd33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a9ee6f9-33be-4f58-8248-694024ec31d4", "external-id": "nsx-vlan-transportzone-581", "segmentation_id": 581, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap200bba8f-ca", "ovs_interfaceid": "200bba8f-ca22-4dfa-9492-f4d2f2e6f212", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1236.531595] env[62627]: DEBUG nova.scheduler.client.report [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Updated inventory for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with generation 143 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1236.531864] env[62627]: DEBUG nova.compute.provider_tree [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Updating resource provider c17e7f32-68d8-4e2f-8979-15d701de02f7 generation from 143 to 144 during operation: update_inventory {{(pid=62627) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1236.532060] env[62627]: DEBUG nova.compute.provider_tree [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Updating inventory in ProviderTree for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 
'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1236.651888] env[62627]: DEBUG oslo_vmware.api [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Task: {'id': task-2195254, 'name': Rename_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.711186] env[62627]: DEBUG nova.compute.manager [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Start spawning the instance on the hypervisor. {{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1236.736589] env[62627]: WARNING nova.network.neutron [None req-5fad0098-843f-499a-a490-4d957e83cb48 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] a1c5c227-8f1f-4881-a6d8-f446b64d9846 already exists in list: networks containing: ['a1c5c227-8f1f-4881-a6d8-f446b64d9846']. ignoring it [ 1236.744259] env[62627]: DEBUG nova.virt.hardware [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1236.744512] env[62627]: DEBUG nova.virt.hardware [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1236.744671] env[62627]: DEBUG nova.virt.hardware [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1236.744852] env[62627]: DEBUG nova.virt.hardware [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1236.745014] env[62627]: DEBUG nova.virt.hardware [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 1236.745185] env[62627]: DEBUG nova.virt.hardware [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1236.745396] env[62627]: DEBUG nova.virt.hardware [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1236.745557] env[62627]: DEBUG nova.virt.hardware [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1236.745725] env[62627]: DEBUG nova.virt.hardware [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1236.745896] env[62627]: DEBUG nova.virt.hardware [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1236.746488] env[62627]: DEBUG nova.virt.hardware [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1236.747368] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cafddcce-8353-4bcc-a0b6-6daea9f709da {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.756039] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf7dfcae-3418-40ca-a315-ee76c11143f2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.771920] env[62627]: DEBUG oslo_concurrency.lockutils [req-8722b590-750c-44c4-a45e-6d13e39b7472 req-9d367226-ea0e-42ec-a400-0d9d27514ae2 service nova] Releasing lock "refresh_cache-4a4b57c8-251c-4ca8-b662-b96214110979" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1237.024806] env[62627]: DEBUG nova.network.neutron [None req-5fad0098-843f-499a-a490-4d957e83cb48 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Updating instance_info_cache with network_info: [{"id": "58815871-fdac-4406-a61e-4d7fca125122", "address": "fa:16:3e:69:fc:43", "network": {"id": "a1c5c227-8f1f-4881-a6d8-f446b64d9846", "bridge": "br-int", "label": 
"tempest-AttachInterfacesTestJSON-1052922661-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.196", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e4f7ad3ebcf4c4d97bdbce58eec5a09", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "023d6500-887e-4dc4-bec5-06b40450d9c0", "external-id": "nsx-vlan-transportzone-108", "segmentation_id": 108, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58815871-fd", "ovs_interfaceid": "58815871-fdac-4406-a61e-4d7fca125122", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "1034439a-d24f-4eb6-8139-67b012254146", "address": "fa:16:3e:74:24:f6", "network": {"id": "a1c5c227-8f1f-4881-a6d8-f446b64d9846", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1052922661-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e4f7ad3ebcf4c4d97bdbce58eec5a09", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "023d6500-887e-4dc4-bec5-06b40450d9c0", "external-id": "nsx-vlan-transportzone-108", "segmentation_id": 108, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1034439a-d2", "ovs_interfaceid": "1034439a-d24f-4eb6-8139-67b012254146", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1237.038879] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.354s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1237.043246] env[62627]: DEBUG nova.compute.manager [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: f195662c-9493-469a-ba36-d712f9944b28] Start building networks asynchronously for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1237.043246] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0a59b3e6-07f2-4be8-936f-ca25e2fd0c04 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.733s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1237.043246] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0a59b3e6-07f2-4be8-936f-ca25e2fd0c04 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1237.045974] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 2.291s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1237.073375] env[62627]: INFO nova.scheduler.client.report [None req-0a59b3e6-07f2-4be8-936f-ca25e2fd0c04 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Deleted allocations for instance 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9 [ 1237.151378] env[62627]: DEBUG oslo_vmware.api [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Task: {'id': task-2195254, 'name': Rename_Task} progress is 99%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.412361] env[62627]: DEBUG nova.network.neutron [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Successfully updated port: 57af03b0-8d4d-4f61-a384-800617af9be4 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1237.527420] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5fad0098-843f-499a-a490-4d957e83cb48 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Releasing lock "refresh_cache-c7e3246d-ffe7-4d81-a2ec-c5878649ed6b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1237.528285] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5fad0098-843f-499a-a490-4d957e83cb48 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquiring lock "c7e3246d-ffe7-4d81-a2ec-c5878649ed6b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1237.528463] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5fad0098-843f-499a-a490-4d957e83cb48 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquired lock "c7e3246d-ffe7-4d81-a2ec-c5878649ed6b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1237.529368] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf9d3a24-aa5d-4f6c-9951-d31f736a9fb6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.547931] env[62627]: DEBUG nova.compute.utils [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1237.559961] env[62627]: DEBUG nova.virt.hardware [None req-5fad0098-843f-499a-a490-4d957e83cb48 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1237.560252] env[62627]: DEBUG nova.virt.hardware [None req-5fad0098-843f-499a-a490-4d957e83cb48 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1237.560413] env[62627]: DEBUG nova.virt.hardware [None req-5fad0098-843f-499a-a490-4d957e83cb48 tempest-AttachInterfacesTestJSON-578257446 
tempest-AttachInterfacesTestJSON-578257446-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1237.560595] env[62627]: DEBUG nova.virt.hardware [None req-5fad0098-843f-499a-a490-4d957e83cb48 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1237.560742] env[62627]: DEBUG nova.virt.hardware [None req-5fad0098-843f-499a-a490-4d957e83cb48 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1237.560887] env[62627]: DEBUG nova.virt.hardware [None req-5fad0098-843f-499a-a490-4d957e83cb48 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1237.561101] env[62627]: DEBUG nova.virt.hardware [None req-5fad0098-843f-499a-a490-4d957e83cb48 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1237.561265] env[62627]: DEBUG nova.virt.hardware [None req-5fad0098-843f-499a-a490-4d957e83cb48 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1237.561429] env[62627]: DEBUG nova.virt.hardware [None req-5fad0098-843f-499a-a490-4d957e83cb48 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1237.561949] env[62627]: DEBUG nova.virt.hardware [None req-5fad0098-843f-499a-a490-4d957e83cb48 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1237.561949] env[62627]: DEBUG nova.virt.hardware [None req-5fad0098-843f-499a-a490-4d957e83cb48 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1237.568685] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-5fad0098-843f-499a-a490-4d957e83cb48 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Reconfiguring VM to attach interface {{(pid=62627) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1237.573553] env[62627]: DEBUG nova.compute.manager [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: 
f195662c-9493-469a-ba36-d712f9944b28] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1237.576000] env[62627]: DEBUG nova.compute.manager [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: f195662c-9493-469a-ba36-d712f9944b28] Allocating IP information in the background. {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1237.576181] env[62627]: DEBUG nova.network.neutron [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: f195662c-9493-469a-ba36-d712f9944b28] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1237.577832] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-03fdf63a-b576-4f89-9ef4-dd08c1a53dbc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.595906] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0a59b3e6-07f2-4be8-936f-ca25e2fd0c04 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "36a8d4d3-d6b6-4b42-9514-0e29ee45aea9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.670s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1237.601191] env[62627]: DEBUG oslo_vmware.api [None req-5fad0098-843f-499a-a490-4d957e83cb48 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Waiting for the task: (returnval){ [ 1237.601191] env[62627]: value = "task-2195255" [ 1237.601191] env[62627]: _type = "Task" [ 1237.601191] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1237.609034] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1237.609241] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance e9d33611-e373-4303-937d-b43d893e51f5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1237.609384] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1237.609567] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 3001147e-7be3-4f72-b330-8f99bf7cf3ea actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1237.609628] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance c7e3246d-ffe7-4d81-a2ec-c5878649ed6b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1237.609696] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 30db4963-095b-4297-ad7c-0606451bd2b6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1237.610009] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance b68042fd-4955-41e4-b14f-1f8268ace377 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1237.610327] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance b282093d-8013-436d-94d2-bc26161456ea actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1237.610327] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 1109aae0-62d2-4c55-8c68-f224f3de8b55 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1237.610432] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 4a4b57c8-251c-4ca8-b662-b96214110979 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1237.610499] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance f2eefd04-2e4d-48b2-9895-ae58070db970 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1237.610607] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 1593ca50-bc97-464a-86ca-17995db15758 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1237.611342] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance f195662c-9493-469a-ba36-d712f9944b28 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1237.611632] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Total usable vcpus: 48, total allocated vcpus: 13 {{(pid=62627) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1237.611783] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3008MB phys_disk=200GB used_disk=13GB total_vcpus=48 used_vcpus=13 pci_stats=[] {{(pid=62627) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1237.623519] env[62627]: DEBUG oslo_vmware.api [None req-5fad0098-843f-499a-a490-4d957e83cb48 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195255, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.641500] env[62627]: DEBUG nova.policy [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4289413aec40498dab5f6170d69eb39a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '74aed70194cf444884768a5c89fe8613', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 1237.652521] env[62627]: DEBUG oslo_vmware.api [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Task: {'id': task-2195254, 'name': Rename_Task, 'duration_secs': 1.357221} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1237.655105] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] [instance: f2eefd04-2e4d-48b2-9895-ae58070db970] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1237.655801] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-18796dbc-5b90-4ffa-9906-c15c976fe99a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.664438] env[62627]: DEBUG oslo_vmware.api [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Waiting for the task: (returnval){ [ 1237.664438] env[62627]: value = "task-2195256" [ 1237.664438] env[62627]: _type = "Task" [ 1237.664438] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1237.679851] env[62627]: DEBUG oslo_vmware.api [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Task: {'id': task-2195256, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.820455] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18f68580-bbce-4fa1-8109-0ab684797943 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.829068] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-181c0a69-0c88-498d-8127-f9fdbb87c8ec {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.865028] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-474c6d94-f159-4105-b494-167c0566e775 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.873614] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6d01433-1d18-4c94-8e19-d1650dd557aa {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.887725] env[62627]: DEBUG nova.compute.provider_tree [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1237.915380] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquiring lock "refresh_cache-1593ca50-bc97-464a-86ca-17995db15758" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1237.915577] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquired lock 
"refresh_cache-1593ca50-bc97-464a-86ca-17995db15758" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1237.915759] env[62627]: DEBUG nova.network.neutron [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1238.115099] env[62627]: DEBUG oslo_vmware.api [None req-5fad0098-843f-499a-a490-4d957e83cb48 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195255, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.175748] env[62627]: DEBUG oslo_vmware.api [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Task: {'id': task-2195256, 'name': PowerOnVM_Task} progress is 94%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.262227] env[62627]: DEBUG nova.network.neutron [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: f195662c-9493-469a-ba36-d712f9944b28] Successfully created port: 20bdf50e-6e1b-46c8-ae6a-26874ecb62a9 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1238.321915] env[62627]: DEBUG nova.compute.manager [req-4c4e30ca-0c6a-4ad4-b3f0-f7e2b0ed0290 req-b405a41f-7ec7-40e0-9113-e6513e850b0e service nova] [instance: 4a4b57c8-251c-4ca8-b662-b96214110979] Received event network-changed-200bba8f-ca22-4dfa-9492-f4d2f2e6f212 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1238.323415] env[62627]: DEBUG nova.compute.manager [req-4c4e30ca-0c6a-4ad4-b3f0-f7e2b0ed0290 req-b405a41f-7ec7-40e0-9113-e6513e850b0e service nova] [instance: 4a4b57c8-251c-4ca8-b662-b96214110979] Refreshing instance network info cache due to event network-changed-200bba8f-ca22-4dfa-9492-f4d2f2e6f212. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1238.323717] env[62627]: DEBUG oslo_concurrency.lockutils [req-4c4e30ca-0c6a-4ad4-b3f0-f7e2b0ed0290 req-b405a41f-7ec7-40e0-9113-e6513e850b0e service nova] Acquiring lock "refresh_cache-4a4b57c8-251c-4ca8-b662-b96214110979" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1238.323903] env[62627]: DEBUG oslo_concurrency.lockutils [req-4c4e30ca-0c6a-4ad4-b3f0-f7e2b0ed0290 req-b405a41f-7ec7-40e0-9113-e6513e850b0e service nova] Acquired lock "refresh_cache-4a4b57c8-251c-4ca8-b662-b96214110979" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1238.324100] env[62627]: DEBUG nova.network.neutron [req-4c4e30ca-0c6a-4ad4-b3f0-f7e2b0ed0290 req-b405a41f-7ec7-40e0-9113-e6513e850b0e service nova] [instance: 4a4b57c8-251c-4ca8-b662-b96214110979] Refreshing network info cache for port 200bba8f-ca22-4dfa-9492-f4d2f2e6f212 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1238.393312] env[62627]: DEBUG nova.scheduler.client.report [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1238.470648] env[62627]: DEBUG nova.network.neutron [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1238.601870] env[62627]: DEBUG nova.compute.manager [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: f195662c-9493-469a-ba36-d712f9944b28] Start spawning the instance on the hypervisor. {{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1238.613268] env[62627]: DEBUG oslo_vmware.api [None req-5fad0098-843f-499a-a490-4d957e83cb48 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195255, 'name': ReconfigVM_Task, 'duration_secs': 0.561599} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1238.613947] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5fad0098-843f-499a-a490-4d957e83cb48 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Releasing lock "c7e3246d-ffe7-4d81-a2ec-c5878649ed6b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1238.614190] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-5fad0098-843f-499a-a490-4d957e83cb48 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Reconfigured VM to attach interface {{(pid=62627) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1238.639224] env[62627]: DEBUG nova.virt.hardware [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1238.639494] env[62627]: DEBUG nova.virt.hardware [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1238.639657] env[62627]: DEBUG nova.virt.hardware [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1238.639843] env[62627]: DEBUG nova.virt.hardware [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1238.639992] env[62627]: DEBUG nova.virt.hardware [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1238.640224] env[62627]: DEBUG nova.virt.hardware [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 
1238.640435] env[62627]: DEBUG nova.virt.hardware [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1238.640594] env[62627]: DEBUG nova.virt.hardware [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1238.640757] env[62627]: DEBUG nova.virt.hardware [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1238.640916] env[62627]: DEBUG nova.virt.hardware [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1238.641100] env[62627]: DEBUG nova.virt.hardware [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1238.641973] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d1c5f92-8dec-4be0-984c-414b029dbb1a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.654396] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6858ded-3466-43fe-8ac2-cf4684ccbee9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.671025] env[62627]: DEBUG nova.network.neutron [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Updating instance_info_cache with network_info: [{"id": "57af03b0-8d4d-4f61-a384-800617af9be4", "address": "fa:16:3e:02:a2:5f", "network": {"id": "e5d8177b-3afb-46cb-88d9-d4777ffa4a39", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1184692155-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f3a36c557145e38c528a628b1f3b67", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a44713-0af1-486e-bc0d-00e03a769fa4", "external-id": "nsx-vlan-transportzone-420", "segmentation_id": 420, "bound_drivers": {"0": 
"nsxv3"}}, "devname": "tap57af03b0-8d", "ovs_interfaceid": "57af03b0-8d4d-4f61-a384-800617af9be4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1238.677728] env[62627]: DEBUG oslo_vmware.api [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Task: {'id': task-2195256, 'name': PowerOnVM_Task, 'duration_secs': 0.662384} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1238.677728] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] [instance: f2eefd04-2e4d-48b2-9895-ae58070db970] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1238.677728] env[62627]: INFO nova.compute.manager [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] [instance: f2eefd04-2e4d-48b2-9895-ae58070db970] Took 9.37 seconds to spawn the instance on the hypervisor. [ 1238.677891] env[62627]: DEBUG nova.compute.manager [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] [instance: f2eefd04-2e4d-48b2-9895-ae58070db970] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1238.678605] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38255ad4-c1e2-48e1-86a5-34448378d342 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.773762] env[62627]: DEBUG nova.compute.manager [req-2581a642-6a07-4888-8fad-3ebfe70f53b8 req-51b5985c-5297-4936-bf26-390b62ed449f service nova] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Received event network-changed-1034439a-d24f-4eb6-8139-67b012254146 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1238.773925] env[62627]: DEBUG nova.compute.manager [req-2581a642-6a07-4888-8fad-3ebfe70f53b8 req-51b5985c-5297-4936-bf26-390b62ed449f service nova] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Refreshing instance network info cache due to event network-changed-1034439a-d24f-4eb6-8139-67b012254146. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1238.774168] env[62627]: DEBUG oslo_concurrency.lockutils [req-2581a642-6a07-4888-8fad-3ebfe70f53b8 req-51b5985c-5297-4936-bf26-390b62ed449f service nova] Acquiring lock "refresh_cache-c7e3246d-ffe7-4d81-a2ec-c5878649ed6b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1238.774319] env[62627]: DEBUG oslo_concurrency.lockutils [req-2581a642-6a07-4888-8fad-3ebfe70f53b8 req-51b5985c-5297-4936-bf26-390b62ed449f service nova] Acquired lock "refresh_cache-c7e3246d-ffe7-4d81-a2ec-c5878649ed6b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1238.774480] env[62627]: DEBUG nova.network.neutron [req-2581a642-6a07-4888-8fad-3ebfe70f53b8 req-51b5985c-5297-4936-bf26-390b62ed449f service nova] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Refreshing network info cache for port 1034439a-d24f-4eb6-8139-67b012254146 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1238.896993] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62627) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1238.897227] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.853s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1239.040138] env[62627]: DEBUG nova.network.neutron [req-4c4e30ca-0c6a-4ad4-b3f0-f7e2b0ed0290 req-b405a41f-7ec7-40e0-9113-e6513e850b0e service nova] [instance: 4a4b57c8-251c-4ca8-b662-b96214110979] Updated VIF entry in instance network info cache for port 200bba8f-ca22-4dfa-9492-f4d2f2e6f212. 
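[editor note] The req-2581a642 entries show the usual pattern around the instance network info cache: acquire the per-instance "refresh_cache-<uuid>" lock, refresh the cache from Neutron, then release the lock further down once the refresh is done. A stdlib-only sketch of that pattern, assuming a hypothetical refresh_from_neutron() callable; the real code goes through oslo_concurrency.lockutils:

```python
import threading
from collections import defaultdict
from contextlib import contextmanager

_locks = defaultdict(threading.Lock)  # one process-local lock per cache name

@contextmanager
def named_lock(name):
    """Acquire/release a named lock, logging roughly like lockutils does."""
    print(f'Acquiring lock "{name}"')
    with _locks[name]:
        print(f'Acquired lock "{name}"')
        yield
    print(f'Lock "{name}" released')

def refresh_network_info_cache(instance_uuid, refresh_from_neutron):
    # refresh_from_neutron is a stand-in for the Neutron API round trip.
    with named_lock(f"refresh_cache-{instance_uuid}"):
        return refresh_from_neutron(instance_uuid)
```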
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1239.040646] env[62627]: DEBUG nova.network.neutron [req-4c4e30ca-0c6a-4ad4-b3f0-f7e2b0ed0290 req-b405a41f-7ec7-40e0-9113-e6513e850b0e service nova] [instance: 4a4b57c8-251c-4ca8-b662-b96214110979] Updating instance_info_cache with network_info: [{"id": "200bba8f-ca22-4dfa-9492-f4d2f2e6f212", "address": "fa:16:3e:53:df:4c", "network": {"id": "a684ed8e-4084-4a9e-b3c8-7a17d7827291", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1686789109-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65aa68a50f0b4f44855294959300bd33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a9ee6f9-33be-4f58-8248-694024ec31d4", "external-id": "nsx-vlan-transportzone-581", "segmentation_id": 581, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap200bba8f-ca", "ovs_interfaceid": "200bba8f-ca22-4dfa-9492-f4d2f2e6f212", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1239.119245] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5fad0098-843f-499a-a490-4d957e83cb48 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lock "interface-c7e3246d-ffe7-4d81-a2ec-c5878649ed6b-1034439a-d24f-4eb6-8139-67b012254146" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.367s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1239.173989] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Releasing lock "refresh_cache-1593ca50-bc97-464a-86ca-17995db15758" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1239.174337] env[62627]: DEBUG nova.compute.manager [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Instance network_info: |[{"id": "57af03b0-8d4d-4f61-a384-800617af9be4", "address": "fa:16:3e:02:a2:5f", "network": {"id": "e5d8177b-3afb-46cb-88d9-d4777ffa4a39", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1184692155-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f3a36c557145e38c528a628b1f3b67", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": 
true, "nsx-logical-switch-id": "e7a44713-0af1-486e-bc0d-00e03a769fa4", "external-id": "nsx-vlan-transportzone-420", "segmentation_id": 420, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57af03b0-8d", "ovs_interfaceid": "57af03b0-8d4d-4f61-a384-800617af9be4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1239.174753] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:02:a2:5f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e7a44713-0af1-486e-bc0d-00e03a769fa4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '57af03b0-8d4d-4f61-a384-800617af9be4', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1239.182594] env[62627]: DEBUG oslo.service.loopingcall [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1239.182846] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1239.183065] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f3ca6766-a95a-4d80-a9fa-543c95c1a909 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.205363] env[62627]: INFO nova.compute.manager [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] [instance: f2eefd04-2e4d-48b2-9895-ae58070db970] Took 16.94 seconds to build instance. [ 1239.210402] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1239.210402] env[62627]: value = "task-2195257" [ 1239.210402] env[62627]: _type = "Task" [ 1239.210402] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1239.219463] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195257, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.532604] env[62627]: DEBUG nova.network.neutron [req-2581a642-6a07-4888-8fad-3ebfe70f53b8 req-51b5985c-5297-4936-bf26-390b62ed449f service nova] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Updated VIF entry in instance network info cache for port 1034439a-d24f-4eb6-8139-67b012254146. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1239.533044] env[62627]: DEBUG nova.network.neutron [req-2581a642-6a07-4888-8fad-3ebfe70f53b8 req-51b5985c-5297-4936-bf26-390b62ed449f service nova] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Updating instance_info_cache with network_info: [{"id": "58815871-fdac-4406-a61e-4d7fca125122", "address": "fa:16:3e:69:fc:43", "network": {"id": "a1c5c227-8f1f-4881-a6d8-f446b64d9846", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1052922661-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.196", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e4f7ad3ebcf4c4d97bdbce58eec5a09", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "023d6500-887e-4dc4-bec5-06b40450d9c0", "external-id": "nsx-vlan-transportzone-108", "segmentation_id": 108, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58815871-fd", "ovs_interfaceid": "58815871-fdac-4406-a61e-4d7fca125122", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "1034439a-d24f-4eb6-8139-67b012254146", "address": "fa:16:3e:74:24:f6", "network": {"id": "a1c5c227-8f1f-4881-a6d8-f446b64d9846", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1052922661-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e4f7ad3ebcf4c4d97bdbce58eec5a09", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "023d6500-887e-4dc4-bec5-06b40450d9c0", "external-id": "nsx-vlan-transportzone-108", "segmentation_id": 108, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1034439a-d2", "ovs_interfaceid": "1034439a-d24f-4eb6-8139-67b012254146", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1239.543978] env[62627]: DEBUG oslo_concurrency.lockutils [req-4c4e30ca-0c6a-4ad4-b3f0-f7e2b0ed0290 req-b405a41f-7ec7-40e0-9113-e6513e850b0e service nova] Releasing lock "refresh_cache-4a4b57c8-251c-4ca8-b662-b96214110979" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1239.707197] env[62627]: DEBUG oslo_concurrency.lockutils [None req-aad0a566-72e9-4176-b1ec-db4c50310d30 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Lock "f2eefd04-2e4d-48b2-9895-ae58070db970" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.452s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1239.720914] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195257, 'name': CreateVM_Task, 'duration_secs': 0.344606} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1239.722039] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1239.722039] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1239.722205] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1239.722531] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1239.722785] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2b58afeb-4c40-459f-8926-00aafb1cb7a6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.727838] env[62627]: DEBUG oslo_vmware.api [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1239.727838] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5206446b-8e2a-ef79-0a73-947180065161" [ 1239.727838] env[62627]: _type = "Task" [ 1239.727838] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1239.736326] env[62627]: DEBUG oslo_vmware.api [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5206446b-8e2a-ef79-0a73-947180065161, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.858908] env[62627]: DEBUG oslo_concurrency.lockutils [None req-420e6c21-51cc-4f44-a5ae-47375100075f tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Acquiring lock "4a4b57c8-251c-4ca8-b662-b96214110979" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1239.859172] env[62627]: DEBUG oslo_concurrency.lockutils [None req-420e6c21-51cc-4f44-a5ae-47375100075f tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Lock "4a4b57c8-251c-4ca8-b662-b96214110979" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1239.859378] env[62627]: DEBUG oslo_concurrency.lockutils [None req-420e6c21-51cc-4f44-a5ae-47375100075f tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Acquiring lock "4a4b57c8-251c-4ca8-b662-b96214110979-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1239.859558] env[62627]: DEBUG oslo_concurrency.lockutils [None req-420e6c21-51cc-4f44-a5ae-47375100075f tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Lock "4a4b57c8-251c-4ca8-b662-b96214110979-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1239.859727] env[62627]: DEBUG oslo_concurrency.lockutils [None req-420e6c21-51cc-4f44-a5ae-47375100075f tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Lock "4a4b57c8-251c-4ca8-b662-b96214110979-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1239.861895] env[62627]: INFO nova.compute.manager [None req-420e6c21-51cc-4f44-a5ae-47375100075f tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] [instance: 4a4b57c8-251c-4ca8-b662-b96214110979] Terminating instance [ 1239.939703] env[62627]: DEBUG nova.network.neutron [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: f195662c-9493-469a-ba36-d712f9944b28] Successfully updated port: 20bdf50e-6e1b-46c8-ae6a-26874ecb62a9 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1240.036445] env[62627]: DEBUG oslo_concurrency.lockutils [req-2581a642-6a07-4888-8fad-3ebfe70f53b8 req-51b5985c-5297-4936-bf26-390b62ed449f service nova] Releasing lock "refresh_cache-c7e3246d-ffe7-4d81-a2ec-c5878649ed6b" {{(pid=62627) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1240.036607] env[62627]: DEBUG nova.compute.manager [req-2581a642-6a07-4888-8fad-3ebfe70f53b8 req-51b5985c-5297-4936-bf26-390b62ed449f service nova] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Received event network-vif-plugged-57af03b0-8d4d-4f61-a384-800617af9be4 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1240.036741] env[62627]: DEBUG oslo_concurrency.lockutils [req-2581a642-6a07-4888-8fad-3ebfe70f53b8 req-51b5985c-5297-4936-bf26-390b62ed449f service nova] Acquiring lock "1593ca50-bc97-464a-86ca-17995db15758-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1240.036943] env[62627]: DEBUG oslo_concurrency.lockutils [req-2581a642-6a07-4888-8fad-3ebfe70f53b8 req-51b5985c-5297-4936-bf26-390b62ed449f service nova] Lock "1593ca50-bc97-464a-86ca-17995db15758-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1240.037139] env[62627]: DEBUG oslo_concurrency.lockutils [req-2581a642-6a07-4888-8fad-3ebfe70f53b8 req-51b5985c-5297-4936-bf26-390b62ed449f service nova] Lock "1593ca50-bc97-464a-86ca-17995db15758-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1240.037302] env[62627]: DEBUG nova.compute.manager [req-2581a642-6a07-4888-8fad-3ebfe70f53b8 req-51b5985c-5297-4936-bf26-390b62ed449f service nova] [instance: 1593ca50-bc97-464a-86ca-17995db15758] No waiting events found dispatching network-vif-plugged-57af03b0-8d4d-4f61-a384-800617af9be4 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1240.037467] env[62627]: WARNING nova.compute.manager [req-2581a642-6a07-4888-8fad-3ebfe70f53b8 req-51b5985c-5297-4936-bf26-390b62ed449f service nova] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Received unexpected event network-vif-plugged-57af03b0-8d4d-4f61-a384-800617af9be4 for instance with vm_state building and task_state spawning. [ 1240.037630] env[62627]: DEBUG nova.compute.manager [req-2581a642-6a07-4888-8fad-3ebfe70f53b8 req-51b5985c-5297-4936-bf26-390b62ed449f service nova] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Received event network-changed-57af03b0-8d4d-4f61-a384-800617af9be4 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1240.038397] env[62627]: DEBUG nova.compute.manager [req-2581a642-6a07-4888-8fad-3ebfe70f53b8 req-51b5985c-5297-4936-bf26-390b62ed449f service nova] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Refreshing instance network info cache due to event network-changed-57af03b0-8d4d-4f61-a384-800617af9be4. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1240.038397] env[62627]: DEBUG oslo_concurrency.lockutils [req-2581a642-6a07-4888-8fad-3ebfe70f53b8 req-51b5985c-5297-4936-bf26-390b62ed449f service nova] Acquiring lock "refresh_cache-1593ca50-bc97-464a-86ca-17995db15758" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1240.038397] env[62627]: DEBUG oslo_concurrency.lockutils [req-2581a642-6a07-4888-8fad-3ebfe70f53b8 req-51b5985c-5297-4936-bf26-390b62ed449f service nova] Acquired lock "refresh_cache-1593ca50-bc97-464a-86ca-17995db15758" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1240.038397] env[62627]: DEBUG nova.network.neutron [req-2581a642-6a07-4888-8fad-3ebfe70f53b8 req-51b5985c-5297-4936-bf26-390b62ed449f service nova] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Refreshing network info cache for port 57af03b0-8d4d-4f61-a384-800617af9be4 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1240.238928] env[62627]: DEBUG oslo_vmware.api [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5206446b-8e2a-ef79-0a73-947180065161, 'name': SearchDatastore_Task, 'duration_secs': 0.010271} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1240.239272] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1240.239508] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1240.239786] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1240.239938] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1240.240141] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Creating directory with path [datastore2] devstack-image-cache_base 
{{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1240.240425] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0a1376df-7d04-4e0c-92d8-dd797dfad83a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.250591] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1240.250787] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1240.251540] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a13c3343-3ba2-4518-9365-09099b95d236 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.258894] env[62627]: DEBUG oslo_vmware.api [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1240.258894] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52c8ff06-da93-20fc-9e2a-596529fb844c" [ 1240.258894] env[62627]: _type = "Task" [ 1240.258894] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1240.267082] env[62627]: DEBUG oslo_vmware.api [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52c8ff06-da93-20fc-9e2a-596529fb844c, 'name': SearchDatastore_Task} progress is 0%. 
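[editor note] The _fetch_image_if_missing path above is deliberately idempotent: hold the lock on the cached image path, MakeDirectory for devstack-image-cache_base (harmless if the folder already exists), then SearchDatastore to see whether the cached VMDK is present and download it only if it is not. A local-filesystem analogue of that check-then-create flow; the paths and the download_image() callable are illustrative only:

```python
from pathlib import Path

def ensure_cached_image(cache_root, image_id, download_image):
    """Create the cache folder if needed and fetch the image only if missing."""
    cache_dir = Path(cache_root) / image_id
    cache_dir.mkdir(parents=True, exist_ok=True)   # analogous to MakeDirectory
    vmdk = cache_dir / f"{image_id}.vmdk"
    if not vmdk.exists():                          # analogous to SearchDatastore
        download_image(image_id, vmdk)
    return vmdk
```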
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.367815] env[62627]: DEBUG oslo_concurrency.lockutils [None req-fce709e6-4a83-4427-b1fc-f67db27d6a15 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Acquiring lock "f2eefd04-2e4d-48b2-9895-ae58070db970" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1240.368180] env[62627]: DEBUG oslo_concurrency.lockutils [None req-fce709e6-4a83-4427-b1fc-f67db27d6a15 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Lock "f2eefd04-2e4d-48b2-9895-ae58070db970" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1240.368411] env[62627]: DEBUG oslo_concurrency.lockutils [None req-fce709e6-4a83-4427-b1fc-f67db27d6a15 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Acquiring lock "f2eefd04-2e4d-48b2-9895-ae58070db970-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1240.368599] env[62627]: DEBUG oslo_concurrency.lockutils [None req-fce709e6-4a83-4427-b1fc-f67db27d6a15 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Lock "f2eefd04-2e4d-48b2-9895-ae58070db970-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1240.368767] env[62627]: DEBUG oslo_concurrency.lockutils [None req-fce709e6-4a83-4427-b1fc-f67db27d6a15 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Lock "f2eefd04-2e4d-48b2-9895-ae58070db970-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1240.370641] env[62627]: DEBUG nova.compute.manager [None req-420e6c21-51cc-4f44-a5ae-47375100075f tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] [instance: 4a4b57c8-251c-4ca8-b662-b96214110979] Start destroying the instance on the hypervisor. 
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1240.370835] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-420e6c21-51cc-4f44-a5ae-47375100075f tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] [instance: 4a4b57c8-251c-4ca8-b662-b96214110979] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1240.371345] env[62627]: INFO nova.compute.manager [None req-fce709e6-4a83-4427-b1fc-f67db27d6a15 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] [instance: f2eefd04-2e4d-48b2-9895-ae58070db970] Terminating instance [ 1240.373108] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1efea69d-9c1a-4c67-93df-c0f08209d53b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.383016] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-420e6c21-51cc-4f44-a5ae-47375100075f tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] [instance: 4a4b57c8-251c-4ca8-b662-b96214110979] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1240.383280] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3d2ac0b2-f182-4270-8f7b-c36d52affeae {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.391778] env[62627]: DEBUG oslo_vmware.api [None req-420e6c21-51cc-4f44-a5ae-47375100075f tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Waiting for the task: (returnval){ [ 1240.391778] env[62627]: value = "task-2195258" [ 1240.391778] env[62627]: _type = "Task" [ 1240.391778] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1240.400767] env[62627]: DEBUG oslo_vmware.api [None req-420e6c21-51cc-4f44-a5ae-47375100075f tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Task: {'id': task-2195258, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.442785] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquiring lock "refresh_cache-f195662c-9493-469a-ba36-d712f9944b28" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1240.442942] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquired lock "refresh_cache-f195662c-9493-469a-ba36-d712f9944b28" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1240.443184] env[62627]: DEBUG nova.network.neutron [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: f195662c-9493-469a-ba36-d712f9944b28] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1240.620694] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ee3b6c7c-69f3-4da6-a5b3-02fe40ec76b9 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquiring lock "interface-c7e3246d-ffe7-4d81-a2ec-c5878649ed6b-1034439a-d24f-4eb6-8139-67b012254146" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1240.620991] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ee3b6c7c-69f3-4da6-a5b3-02fe40ec76b9 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lock "interface-c7e3246d-ffe7-4d81-a2ec-c5878649ed6b-1034439a-d24f-4eb6-8139-67b012254146" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1240.771609] env[62627]: DEBUG oslo_vmware.api [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52c8ff06-da93-20fc-9e2a-596529fb844c, 'name': SearchDatastore_Task, 'duration_secs': 0.009261} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1240.772618] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-face3003-9cb0-4f29-8558-434197fc3b7f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.779924] env[62627]: DEBUG oslo_vmware.api [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1240.779924] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52fc2f40-82b5-8eb8-180e-42423441cd21" [ 1240.779924] env[62627]: _type = "Task" [ 1240.779924] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1240.794104] env[62627]: DEBUG oslo_vmware.api [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52fc2f40-82b5-8eb8-180e-42423441cd21, 'name': SearchDatastore_Task, 'duration_secs': 0.0106} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1240.794366] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1240.794614] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 1593ca50-bc97-464a-86ca-17995db15758/1593ca50-bc97-464a-86ca-17995db15758.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1240.794877] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e5e1b773-9002-4640-9036-83f6137878b7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.802797] env[62627]: DEBUG oslo_vmware.api [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1240.802797] env[62627]: value = "task-2195259" [ 1240.802797] env[62627]: _type = "Task" [ 1240.802797] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1240.812301] env[62627]: DEBUG oslo_vmware.api [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195259, 'name': CopyVirtualDisk_Task} progress is 0%. 
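[editor note] Once the cached image is confirmed, spawn proceeds by copying the cached VMDK into the instance directory (the CopyVirtualDisk_Task above) and then, as the later ExtendVirtualDisk_Task entry shows, growing the root disk to the flavor size. A compressed sketch of that sequence; copy_virtual_disk/extend_virtual_disk/wait_for_task are hypothetical stand-ins for the VirtualDiskManager calls and the task-polling helper:

```python
def provision_root_disk(session, image_vmdk, instance_vmdk, root_gb,
                        copy_virtual_disk, extend_virtual_disk, wait_for_task):
    """Copy the cached image to the instance path, then extend it."""
    copy_task = copy_virtual_disk(session, src=image_vmdk, dst=instance_vmdk)
    wait_for_task(copy_task)
    # The log extends to 1048576 KiB, i.e. a 1 GiB root disk.
    extend_task = extend_virtual_disk(session, instance_vmdk,
                                      new_size_kb=root_gb * 1024 * 1024)
    wait_for_task(extend_task)
```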
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.812581] env[62627]: DEBUG oslo_concurrency.lockutils [None req-bd0246fa-c68d-4c55-9548-2babfce085b3 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquiring lock "f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1240.812806] env[62627]: DEBUG oslo_concurrency.lockutils [None req-bd0246fa-c68d-4c55-9548-2babfce085b3 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1240.844168] env[62627]: DEBUG nova.compute.manager [req-72c15c7b-3614-4b1a-997c-b8a243b0f7e3 req-16afa6d6-95c0-414a-a2bd-f36b3fdf7fe5 service nova] [instance: f195662c-9493-469a-ba36-d712f9944b28] Received event network-vif-plugged-20bdf50e-6e1b-46c8-ae6a-26874ecb62a9 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1240.844398] env[62627]: DEBUG oslo_concurrency.lockutils [req-72c15c7b-3614-4b1a-997c-b8a243b0f7e3 req-16afa6d6-95c0-414a-a2bd-f36b3fdf7fe5 service nova] Acquiring lock "f195662c-9493-469a-ba36-d712f9944b28-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1240.844597] env[62627]: DEBUG oslo_concurrency.lockutils [req-72c15c7b-3614-4b1a-997c-b8a243b0f7e3 req-16afa6d6-95c0-414a-a2bd-f36b3fdf7fe5 service nova] Lock "f195662c-9493-469a-ba36-d712f9944b28-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1240.844766] env[62627]: DEBUG oslo_concurrency.lockutils [req-72c15c7b-3614-4b1a-997c-b8a243b0f7e3 req-16afa6d6-95c0-414a-a2bd-f36b3fdf7fe5 service nova] Lock "f195662c-9493-469a-ba36-d712f9944b28-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1240.844948] env[62627]: DEBUG nova.compute.manager [req-72c15c7b-3614-4b1a-997c-b8a243b0f7e3 req-16afa6d6-95c0-414a-a2bd-f36b3fdf7fe5 service nova] [instance: f195662c-9493-469a-ba36-d712f9944b28] No waiting events found dispatching network-vif-plugged-20bdf50e-6e1b-46c8-ae6a-26874ecb62a9 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1240.845159] env[62627]: WARNING nova.compute.manager [req-72c15c7b-3614-4b1a-997c-b8a243b0f7e3 req-16afa6d6-95c0-414a-a2bd-f36b3fdf7fe5 service nova] [instance: f195662c-9493-469a-ba36-d712f9944b28] Received unexpected event network-vif-plugged-20bdf50e-6e1b-46c8-ae6a-26874ecb62a9 for instance with vm_state building and task_state spawning. 
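[editor note] The network-vif-plugged handling above illustrates nova's external-event contract: the compute manager only prepares a waiter when it is about to block on an event, so an event that arrives while the instance is still building is popped, finds no waiter, and is logged as unexpected. A small registry sketch of that prepare/pop behaviour (illustrative, not the InstanceEvents implementation):

```python
import threading

class InstanceEventWaiter:
    """Track externally delivered events, e.g. network-vif-plugged."""

    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = {}   # (instance_uuid, event_name) -> threading.Event

    def prepare(self, instance_uuid, event_name):
        """Register interest in an event; the caller waits on the returned Event."""
        with self._lock:
            ev = threading.Event()
            self._waiters[(instance_uuid, event_name)] = ev
            return ev

    def pop(self, instance_uuid, event_name):
        """Called when Neutron reports the event; returns False if nobody waits."""
        with self._lock:
            ev = self._waiters.pop((instance_uuid, event_name), None)
        if ev is None:
            print(f"Received unexpected event {event_name} for {instance_uuid}")
            return False
        ev.set()
        return True
```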
[ 1240.845252] env[62627]: DEBUG nova.compute.manager [req-72c15c7b-3614-4b1a-997c-b8a243b0f7e3 req-16afa6d6-95c0-414a-a2bd-f36b3fdf7fe5 service nova] [instance: f195662c-9493-469a-ba36-d712f9944b28] Received event network-changed-20bdf50e-6e1b-46c8-ae6a-26874ecb62a9 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1240.845424] env[62627]: DEBUG nova.compute.manager [req-72c15c7b-3614-4b1a-997c-b8a243b0f7e3 req-16afa6d6-95c0-414a-a2bd-f36b3fdf7fe5 service nova] [instance: f195662c-9493-469a-ba36-d712f9944b28] Refreshing instance network info cache due to event network-changed-20bdf50e-6e1b-46c8-ae6a-26874ecb62a9. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1240.845558] env[62627]: DEBUG oslo_concurrency.lockutils [req-72c15c7b-3614-4b1a-997c-b8a243b0f7e3 req-16afa6d6-95c0-414a-a2bd-f36b3fdf7fe5 service nova] Acquiring lock "refresh_cache-f195662c-9493-469a-ba36-d712f9944b28" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1240.851197] env[62627]: DEBUG nova.network.neutron [req-2581a642-6a07-4888-8fad-3ebfe70f53b8 req-51b5985c-5297-4936-bf26-390b62ed449f service nova] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Updated VIF entry in instance network info cache for port 57af03b0-8d4d-4f61-a384-800617af9be4. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1240.851528] env[62627]: DEBUG nova.network.neutron [req-2581a642-6a07-4888-8fad-3ebfe70f53b8 req-51b5985c-5297-4936-bf26-390b62ed449f service nova] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Updating instance_info_cache with network_info: [{"id": "57af03b0-8d4d-4f61-a384-800617af9be4", "address": "fa:16:3e:02:a2:5f", "network": {"id": "e5d8177b-3afb-46cb-88d9-d4777ffa4a39", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1184692155-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f3a36c557145e38c528a628b1f3b67", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a44713-0af1-486e-bc0d-00e03a769fa4", "external-id": "nsx-vlan-transportzone-420", "segmentation_id": 420, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57af03b0-8d", "ovs_interfaceid": "57af03b0-8d4d-4f61-a384-800617af9be4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1240.878024] env[62627]: DEBUG nova.compute.manager [None req-fce709e6-4a83-4427-b1fc-f67db27d6a15 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] [instance: f2eefd04-2e4d-48b2-9895-ae58070db970] Start destroying the instance on the hypervisor. 
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1240.878290] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-fce709e6-4a83-4427-b1fc-f67db27d6a15 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] [instance: f2eefd04-2e4d-48b2-9895-ae58070db970] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1240.879961] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5522763c-78b6-451d-a2a3-c32e020aaedd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.888263] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-fce709e6-4a83-4427-b1fc-f67db27d6a15 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] [instance: f2eefd04-2e4d-48b2-9895-ae58070db970] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1240.888513] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cccfe9ed-e789-4873-aca7-d1c0a838c3bc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.897087] env[62627]: DEBUG oslo_vmware.api [None req-fce709e6-4a83-4427-b1fc-f67db27d6a15 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Waiting for the task: (returnval){ [ 1240.897087] env[62627]: value = "task-2195260" [ 1240.897087] env[62627]: _type = "Task" [ 1240.897087] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1240.903343] env[62627]: DEBUG oslo_vmware.api [None req-420e6c21-51cc-4f44-a5ae-47375100075f tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Task: {'id': task-2195258, 'name': PowerOffVM_Task, 'duration_secs': 0.32674} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1240.903961] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-420e6c21-51cc-4f44-a5ae-47375100075f tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] [instance: 4a4b57c8-251c-4ca8-b662-b96214110979] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1240.904157] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-420e6c21-51cc-4f44-a5ae-47375100075f tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] [instance: 4a4b57c8-251c-4ca8-b662-b96214110979] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1240.904419] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8b9772df-5bc6-41fe-ab97-447b72e7904b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.908993] env[62627]: DEBUG oslo_vmware.api [None req-fce709e6-4a83-4427-b1fc-f67db27d6a15 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Task: {'id': task-2195260, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.978643] env[62627]: DEBUG nova.network.neutron [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: f195662c-9493-469a-ba36-d712f9944b28] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1240.986778] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-420e6c21-51cc-4f44-a5ae-47375100075f tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] [instance: 4a4b57c8-251c-4ca8-b662-b96214110979] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1240.987018] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-420e6c21-51cc-4f44-a5ae-47375100075f tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] [instance: 4a4b57c8-251c-4ca8-b662-b96214110979] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1240.987223] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-420e6c21-51cc-4f44-a5ae-47375100075f tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Deleting the datastore file [datastore2] 4a4b57c8-251c-4ca8-b662-b96214110979 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1240.987502] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c7e7b4b5-bf82-432a-a19e-b78f02119cc7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.996155] env[62627]: DEBUG oslo_vmware.api [None req-420e6c21-51cc-4f44-a5ae-47375100075f tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Waiting for the task: (returnval){ [ 1240.996155] env[62627]: value = "task-2195262" [ 1240.996155] env[62627]: _type = "Task" [ 1240.996155] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1241.010233] env[62627]: DEBUG oslo_vmware.api [None req-420e6c21-51cc-4f44-a5ae-47375100075f tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Task: {'id': task-2195262, 'name': DeleteDatastoreFile_Task} progress is 0%. 
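[editor note] The terminate path for 4a4b57c8 above follows the driver's fixed destroy order: PowerOffVM_Task, UnregisterVM, then DeleteDatastoreFile_Task on the instance directory, each awaited before the next step. A linear sketch of that order, with power_off/unregister/delete_datastore_dir as hypothetical stand-ins for the vSphere calls:

```python
def destroy_instance(vm_ref, datastore_path, power_off, unregister,
                     delete_datastore_dir, wait_for_task):
    """Tear down a VM the way the log shows: stop, unregister, delete files."""
    wait_for_task(power_off(vm_ref))                      # PowerOffVM_Task
    unregister(vm_ref)                                    # UnregisterVM (no task)
    wait_for_task(delete_datastore_dir(datastore_path))   # DeleteDatastoreFile_Task
```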
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.125902] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ee3b6c7c-69f3-4da6-a5b3-02fe40ec76b9 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquiring lock "c7e3246d-ffe7-4d81-a2ec-c5878649ed6b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1241.126847] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ee3b6c7c-69f3-4da6-a5b3-02fe40ec76b9 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquired lock "c7e3246d-ffe7-4d81-a2ec-c5878649ed6b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1241.128068] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49150f29-f9e8-466f-9bca-5a2e1a4c94c3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.150250] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ba74c27-6a21-4df6-be1f-5b97c2476f15 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.179266] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ee3b6c7c-69f3-4da6-a5b3-02fe40ec76b9 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Reconfiguring VM to detach interface {{(pid=62627) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1241.182356] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e194df6d-0a20-4018-aacc-ac5ea644e5a6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.202688] env[62627]: DEBUG oslo_vmware.api [None req-ee3b6c7c-69f3-4da6-a5b3-02fe40ec76b9 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Waiting for the task: (returnval){ [ 1241.202688] env[62627]: value = "task-2195263" [ 1241.202688] env[62627]: _type = "Task" [ 1241.202688] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1241.212102] env[62627]: DEBUG oslo_vmware.api [None req-ee3b6c7c-69f3-4da6-a5b3-02fe40ec76b9 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195263, 'name': ReconfigVM_Task} progress is 5%. 
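[editor note] The AttachInterfacesTestJSON entries show detach_interface taking the per-instance interface lock and then issuing a ReconfigVM_Task whose spec removes the virtual NIC bound to the Neutron port. A schematic version of building that spec with plain dicts; the real driver constructs vSphere VirtualDeviceConfigSpec objects, and the device shape assumed here is only illustrative:

```python
def build_detach_interface_spec(vm_devices, port_id):
    """Return a reconfigure spec that removes the NIC bound to port_id.

    vm_devices is assumed to be a list of dicts carrying an 'iface_id',
    mirroring what the driver reads back from the VM's hardware list.
    """
    for device in vm_devices:
        if device.get("iface_id") == port_id:
            return {"deviceChange": [{"operation": "remove", "device": device}]}
    raise LookupError(f"No NIC bound to port {port_id} found on the VM")
```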
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.237869] env[62627]: DEBUG nova.network.neutron [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: f195662c-9493-469a-ba36-d712f9944b28] Updating instance_info_cache with network_info: [{"id": "20bdf50e-6e1b-46c8-ae6a-26874ecb62a9", "address": "fa:16:3e:ee:d5:70", "network": {"id": "f5de376b-cb15-4145-90b2-664ca4fa638e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1451402594-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74aed70194cf444884768a5c89fe8613", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "098df9b7-d759-47f7-b756-334848cb423b", "external-id": "nsx-vlan-transportzone-765", "segmentation_id": 765, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap20bdf50e-6e", "ovs_interfaceid": "20bdf50e-6e1b-46c8-ae6a-26874ecb62a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1241.313666] env[62627]: DEBUG oslo_vmware.api [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195259, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.495384} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1241.314041] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 1593ca50-bc97-464a-86ca-17995db15758/1593ca50-bc97-464a-86ca-17995db15758.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1241.314114] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1241.314308] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-55575300-563f-4d7c-9ffa-54574cf9d87d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.316553] env[62627]: INFO nova.compute.manager [None req-bd0246fa-c68d-4c55-9548-2babfce085b3 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Detaching volume 4c7ed974-1052-4c7c-aece-d94f225c352b [ 1241.325346] env[62627]: DEBUG oslo_vmware.api [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1241.325346] env[62627]: value = "task-2195264" [ 1241.325346] env[62627]: _type = "Task" [ 1241.325346] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1241.333488] env[62627]: DEBUG oslo_vmware.api [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195264, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.353992] env[62627]: INFO nova.virt.block_device [None req-bd0246fa-c68d-4c55-9548-2babfce085b3 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Attempting to driver detach volume 4c7ed974-1052-4c7c-aece-d94f225c352b from mountpoint /dev/sdb [ 1241.353992] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd0246fa-c68d-4c55-9548-2babfce085b3 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Volume detach. 
Driver type: vmdk {{(pid=62627) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1241.354143] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd0246fa-c68d-4c55-9548-2babfce085b3 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447803', 'volume_id': '4c7ed974-1052-4c7c-aece-d94f225c352b', 'name': 'volume-4c7ed974-1052-4c7c-aece-d94f225c352b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1', 'attached_at': '', 'detached_at': '', 'volume_id': '4c7ed974-1052-4c7c-aece-d94f225c352b', 'serial': '4c7ed974-1052-4c7c-aece-d94f225c352b'} {{(pid=62627) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1241.354790] env[62627]: DEBUG oslo_concurrency.lockutils [req-2581a642-6a07-4888-8fad-3ebfe70f53b8 req-51b5985c-5297-4936-bf26-390b62ed449f service nova] Releasing lock "refresh_cache-1593ca50-bc97-464a-86ca-17995db15758" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1241.355705] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c463c425-04dc-4bbc-bc56-a5b0948cf25a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.378569] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc4d40e0-06a9-4c35-9658-b402403e5e8b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.387337] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d52751b6-21ec-46f8-b617-026e51636176 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.411336] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-274fdee7-cc32-4c70-86c9-c3aa6fd823f7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.418906] env[62627]: DEBUG oslo_vmware.api [None req-fce709e6-4a83-4427-b1fc-f67db27d6a15 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Task: {'id': task-2195260, 'name': PowerOffVM_Task, 'duration_secs': 0.294863} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1241.430693] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-fce709e6-4a83-4427-b1fc-f67db27d6a15 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] [instance: f2eefd04-2e4d-48b2-9895-ae58070db970] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1241.430864] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-fce709e6-4a83-4427-b1fc-f67db27d6a15 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] [instance: f2eefd04-2e4d-48b2-9895-ae58070db970] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1241.431161] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd0246fa-c68d-4c55-9548-2babfce085b3 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] The volume has not been displaced from its original location: [datastore2] volume-4c7ed974-1052-4c7c-aece-d94f225c352b/volume-4c7ed974-1052-4c7c-aece-d94f225c352b.vmdk. No consolidation needed. {{(pid=62627) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1241.436188] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd0246fa-c68d-4c55-9548-2babfce085b3 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Reconfiguring VM instance instance-00000056 to detach disk 2001 {{(pid=62627) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1241.436456] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2d96ad35-3a75-4ae0-b36e-0267768aeb44 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.437846] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-211708fa-9c27-44ae-a900-bcb8f1aa3be1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.456106] env[62627]: DEBUG oslo_vmware.api [None req-bd0246fa-c68d-4c55-9548-2babfce085b3 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1241.456106] env[62627]: value = "task-2195266" [ 1241.456106] env[62627]: _type = "Task" [ 1241.456106] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1241.466328] env[62627]: DEBUG oslo_vmware.api [None req-bd0246fa-c68d-4c55-9548-2babfce085b3 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195266, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.508715] env[62627]: DEBUG oslo_vmware.api [None req-420e6c21-51cc-4f44-a5ae-47375100075f tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Task: {'id': task-2195262, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.390299} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1241.509702] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-420e6c21-51cc-4f44-a5ae-47375100075f tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1241.510040] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-420e6c21-51cc-4f44-a5ae-47375100075f tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] [instance: 4a4b57c8-251c-4ca8-b662-b96214110979] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1241.510373] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-420e6c21-51cc-4f44-a5ae-47375100075f tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] [instance: 4a4b57c8-251c-4ca8-b662-b96214110979] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1241.510706] env[62627]: INFO nova.compute.manager [None req-420e6c21-51cc-4f44-a5ae-47375100075f tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] [instance: 4a4b57c8-251c-4ca8-b662-b96214110979] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1241.511050] env[62627]: DEBUG oslo.service.loopingcall [None req-420e6c21-51cc-4f44-a5ae-47375100075f tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1241.512048] env[62627]: DEBUG nova.compute.manager [-] [instance: 4a4b57c8-251c-4ca8-b662-b96214110979] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1241.512153] env[62627]: DEBUG nova.network.neutron [-] [instance: 4a4b57c8-251c-4ca8-b662-b96214110979] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1241.514057] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-fce709e6-4a83-4427-b1fc-f67db27d6a15 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] [instance: f2eefd04-2e4d-48b2-9895-ae58070db970] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1241.514252] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-fce709e6-4a83-4427-b1fc-f67db27d6a15 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] [instance: f2eefd04-2e4d-48b2-9895-ae58070db970] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1241.514440] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-fce709e6-4a83-4427-b1fc-f67db27d6a15 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Deleting the datastore file [datastore2] f2eefd04-2e4d-48b2-9895-ae58070db970 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1241.515108] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9c5e44f1-3c73-42fc-943c-009f9ebb9280 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.522086] env[62627]: DEBUG oslo_vmware.api [None req-fce709e6-4a83-4427-b1fc-f67db27d6a15 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Waiting for the task: (returnval){ [ 1241.522086] env[62627]: value = "task-2195267" [ 1241.522086] env[62627]: _type = "Task" [ 1241.522086] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1241.531281] env[62627]: DEBUG oslo_vmware.api [None req-fce709e6-4a83-4427-b1fc-f67db27d6a15 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Task: {'id': task-2195267, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.713350] env[62627]: DEBUG oslo_vmware.api [None req-ee3b6c7c-69f3-4da6-a5b3-02fe40ec76b9 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195263, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.741131] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Releasing lock "refresh_cache-f195662c-9493-469a-ba36-d712f9944b28" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1241.741480] env[62627]: DEBUG nova.compute.manager [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: f195662c-9493-469a-ba36-d712f9944b28] Instance network_info: |[{"id": "20bdf50e-6e1b-46c8-ae6a-26874ecb62a9", "address": "fa:16:3e:ee:d5:70", "network": {"id": "f5de376b-cb15-4145-90b2-664ca4fa638e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1451402594-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74aed70194cf444884768a5c89fe8613", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "098df9b7-d759-47f7-b756-334848cb423b", "external-id": "nsx-vlan-transportzone-765", "segmentation_id": 765, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap20bdf50e-6e", "ovs_interfaceid": "20bdf50e-6e1b-46c8-ae6a-26874ecb62a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1241.741791] env[62627]: DEBUG oslo_concurrency.lockutils [req-72c15c7b-3614-4b1a-997c-b8a243b0f7e3 req-16afa6d6-95c0-414a-a2bd-f36b3fdf7fe5 service nova] Acquired lock "refresh_cache-f195662c-9493-469a-ba36-d712f9944b28" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1241.741970] env[62627]: DEBUG nova.network.neutron [req-72c15c7b-3614-4b1a-997c-b8a243b0f7e3 req-16afa6d6-95c0-414a-a2bd-f36b3fdf7fe5 service nova] [instance: f195662c-9493-469a-ba36-d712f9944b28] Refreshing network info cache for port 20bdf50e-6e1b-46c8-ae6a-26874ecb62a9 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1241.743200] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: f195662c-9493-469a-ba36-d712f9944b28] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ee:d5:70', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '098df9b7-d759-47f7-b756-334848cb423b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '20bdf50e-6e1b-46c8-ae6a-26874ecb62a9', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1241.750679] env[62627]: DEBUG oslo.service.loopingcall [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 
tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1241.751150] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f195662c-9493-469a-ba36-d712f9944b28] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1241.751475] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5533691c-1e7f-4deb-affd-18a86efa8a14 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.775115] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1241.775115] env[62627]: value = "task-2195268" [ 1241.775115] env[62627]: _type = "Task" [ 1241.775115] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1241.783621] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195268, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.836083] env[62627]: DEBUG oslo_vmware.api [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195264, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073418} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1241.836397] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1241.837213] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6146fc7e-b506-46e4-846e-5fde86ec933d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.859729] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Reconfiguring VM instance instance-00000067 to attach disk [datastore2] 1593ca50-bc97-464a-86ca-17995db15758/1593ca50-bc97-464a-86ca-17995db15758.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1241.860031] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2c2b4370-6540-48e5-b471-3ef6843567f0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.880377] env[62627]: DEBUG oslo_vmware.api [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1241.880377] env[62627]: value = "task-2195269" [ 1241.880377] env[62627]: _type = "Task" [ 1241.880377] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1241.888720] env[62627]: DEBUG oslo_vmware.api [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195269, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.966032] env[62627]: DEBUG oslo_vmware.api [None req-bd0246fa-c68d-4c55-9548-2babfce085b3 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195266, 'name': ReconfigVM_Task, 'duration_secs': 0.26779} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1241.966397] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd0246fa-c68d-4c55-9548-2babfce085b3 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Reconfigured VM instance instance-00000056 to detach disk 2001 {{(pid=62627) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1241.971179] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1498abbb-4ea2-47f0-b1e7-00fbf0e58542 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.987021] env[62627]: DEBUG oslo_vmware.api [None req-bd0246fa-c68d-4c55-9548-2babfce085b3 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1241.987021] env[62627]: value = "task-2195270" [ 1241.987021] env[62627]: _type = "Task" [ 1241.987021] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1241.997241] env[62627]: DEBUG oslo_vmware.api [None req-bd0246fa-c68d-4c55-9548-2babfce085b3 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195270, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.032092] env[62627]: DEBUG oslo_vmware.api [None req-fce709e6-4a83-4427-b1fc-f67db27d6a15 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Task: {'id': task-2195267, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.157843} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1242.032381] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-fce709e6-4a83-4427-b1fc-f67db27d6a15 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1242.032566] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-fce709e6-4a83-4427-b1fc-f67db27d6a15 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] [instance: f2eefd04-2e4d-48b2-9895-ae58070db970] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1242.032741] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-fce709e6-4a83-4427-b1fc-f67db27d6a15 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] [instance: f2eefd04-2e4d-48b2-9895-ae58070db970] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1242.032916] env[62627]: INFO nova.compute.manager [None req-fce709e6-4a83-4427-b1fc-f67db27d6a15 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] [instance: f2eefd04-2e4d-48b2-9895-ae58070db970] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1242.033179] env[62627]: DEBUG oslo.service.loopingcall [None req-fce709e6-4a83-4427-b1fc-f67db27d6a15 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1242.033439] env[62627]: DEBUG nova.compute.manager [-] [instance: f2eefd04-2e4d-48b2-9895-ae58070db970] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1242.033517] env[62627]: DEBUG nova.network.neutron [-] [instance: f2eefd04-2e4d-48b2-9895-ae58070db970] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1242.213669] env[62627]: DEBUG oslo_vmware.api [None req-ee3b6c7c-69f3-4da6-a5b3-02fe40ec76b9 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195263, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.283925] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195268, 'name': CreateVM_Task, 'duration_secs': 0.35368} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1242.285491] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f195662c-9493-469a-ba36-d712f9944b28] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1242.286364] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1242.286538] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1242.286830] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1242.287228] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bbf98552-2f90-4012-b3a5-e94bfb76822b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.292277] env[62627]: DEBUG nova.network.neutron [-] [instance: 4a4b57c8-251c-4ca8-b662-b96214110979] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1242.293546] env[62627]: DEBUG oslo_vmware.api [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1242.293546] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52941b57-677a-412b-f924-b028ffa26975" [ 1242.293546] env[62627]: _type = "Task" [ 1242.293546] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1242.306051] env[62627]: DEBUG oslo_vmware.api [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52941b57-677a-412b-f924-b028ffa26975, 'name': SearchDatastore_Task, 'duration_secs': 0.011008} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1242.306336] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1242.306563] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: f195662c-9493-469a-ba36-d712f9944b28] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1242.306787] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1242.306930] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1242.307116] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1242.307579] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-82464179-512c-4a07-ba7e-d85edb84aa6f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.315496] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1242.315823] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1242.316365] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-32a8fea5-7cc8-4744-80e4-c3b4cab2160c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.321864] env[62627]: DEBUG oslo_vmware.api [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1242.321864] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5285682d-ee65-cc59-3864-56638a714a9f" [ 1242.321864] env[62627]: _type = "Task" [ 1242.321864] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1242.329619] env[62627]: DEBUG oslo_vmware.api [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5285682d-ee65-cc59-3864-56638a714a9f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.390368] env[62627]: DEBUG oslo_vmware.api [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195269, 'name': ReconfigVM_Task, 'duration_secs': 0.336878} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1242.391256] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Reconfigured VM instance instance-00000067 to attach disk [datastore2] 1593ca50-bc97-464a-86ca-17995db15758/1593ca50-bc97-464a-86ca-17995db15758.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1242.391381] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-951018b5-2834-4f52-889d-b21e4fbf885d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.400599] env[62627]: DEBUG oslo_vmware.api [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1242.400599] env[62627]: value = "task-2195271" [ 1242.400599] env[62627]: _type = "Task" [ 1242.400599] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1242.409023] env[62627]: DEBUG oslo_vmware.api [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195271, 'name': Rename_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.475568] env[62627]: DEBUG nova.network.neutron [req-72c15c7b-3614-4b1a-997c-b8a243b0f7e3 req-16afa6d6-95c0-414a-a2bd-f36b3fdf7fe5 service nova] [instance: f195662c-9493-469a-ba36-d712f9944b28] Updated VIF entry in instance network info cache for port 20bdf50e-6e1b-46c8-ae6a-26874ecb62a9. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1242.475951] env[62627]: DEBUG nova.network.neutron [req-72c15c7b-3614-4b1a-997c-b8a243b0f7e3 req-16afa6d6-95c0-414a-a2bd-f36b3fdf7fe5 service nova] [instance: f195662c-9493-469a-ba36-d712f9944b28] Updating instance_info_cache with network_info: [{"id": "20bdf50e-6e1b-46c8-ae6a-26874ecb62a9", "address": "fa:16:3e:ee:d5:70", "network": {"id": "f5de376b-cb15-4145-90b2-664ca4fa638e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1451402594-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74aed70194cf444884768a5c89fe8613", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "098df9b7-d759-47f7-b756-334848cb423b", "external-id": "nsx-vlan-transportzone-765", "segmentation_id": 765, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap20bdf50e-6e", "ovs_interfaceid": "20bdf50e-6e1b-46c8-ae6a-26874ecb62a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1242.497796] env[62627]: DEBUG oslo_vmware.api [None req-bd0246fa-c68d-4c55-9548-2babfce085b3 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195270, 'name': ReconfigVM_Task, 'duration_secs': 0.166813} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1242.498136] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd0246fa-c68d-4c55-9548-2babfce085b3 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447803', 'volume_id': '4c7ed974-1052-4c7c-aece-d94f225c352b', 'name': 'volume-4c7ed974-1052-4c7c-aece-d94f225c352b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1', 'attached_at': '', 'detached_at': '', 'volume_id': '4c7ed974-1052-4c7c-aece-d94f225c352b', 'serial': '4c7ed974-1052-4c7c-aece-d94f225c352b'} {{(pid=62627) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1242.714508] env[62627]: DEBUG oslo_vmware.api [None req-ee3b6c7c-69f3-4da6-a5b3-02fe40ec76b9 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195263, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.786517] env[62627]: DEBUG nova.network.neutron [-] [instance: f2eefd04-2e4d-48b2-9895-ae58070db970] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1242.795200] env[62627]: INFO nova.compute.manager [-] [instance: 4a4b57c8-251c-4ca8-b662-b96214110979] Took 1.28 seconds to deallocate network for instance. [ 1242.832948] env[62627]: DEBUG oslo_vmware.api [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5285682d-ee65-cc59-3864-56638a714a9f, 'name': SearchDatastore_Task, 'duration_secs': 0.009225} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1242.833749] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-785e5ecd-9a6f-4a83-9a4a-2197477a9076 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.840478] env[62627]: DEBUG oslo_vmware.api [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1242.840478] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5245e551-63ce-b6ac-3f02-009f191552b4" [ 1242.840478] env[62627]: _type = "Task" [ 1242.840478] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1242.849713] env[62627]: DEBUG oslo_vmware.api [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5245e551-63ce-b6ac-3f02-009f191552b4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.912116] env[62627]: DEBUG oslo_vmware.api [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195271, 'name': Rename_Task, 'duration_secs': 0.137478} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1242.912204] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1242.912448] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-319712d8-274d-4b06-aeca-e8bdd58db44d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.917055] env[62627]: DEBUG nova.compute.manager [req-fa72ca15-4c8f-4343-8ab2-9f0333fde26e req-0138f3f0-cc5b-4971-b3f9-8e812947cefa service nova] [instance: 4a4b57c8-251c-4ca8-b662-b96214110979] Received event network-vif-deleted-200bba8f-ca22-4dfa-9492-f4d2f2e6f212 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1242.917312] env[62627]: DEBUG nova.compute.manager [req-fa72ca15-4c8f-4343-8ab2-9f0333fde26e req-0138f3f0-cc5b-4971-b3f9-8e812947cefa service nova] [instance: f2eefd04-2e4d-48b2-9895-ae58070db970] Received event network-vif-deleted-98d518c4-51bc-4451-a4d2-6bd21f656d3d {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1242.920271] env[62627]: DEBUG oslo_vmware.api [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1242.920271] env[62627]: value = "task-2195272" [ 1242.920271] env[62627]: _type = "Task" [ 1242.920271] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1242.929869] env[62627]: DEBUG oslo_vmware.api [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195272, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.978731] env[62627]: DEBUG oslo_concurrency.lockutils [req-72c15c7b-3614-4b1a-997c-b8a243b0f7e3 req-16afa6d6-95c0-414a-a2bd-f36b3fdf7fe5 service nova] Releasing lock "refresh_cache-f195662c-9493-469a-ba36-d712f9944b28" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1243.040371] env[62627]: DEBUG nova.objects.instance [None req-bd0246fa-c68d-4c55-9548-2babfce085b3 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lazy-loading 'flavor' on Instance uuid f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1243.214591] env[62627]: DEBUG oslo_vmware.api [None req-ee3b6c7c-69f3-4da6-a5b3-02fe40ec76b9 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195263, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.289439] env[62627]: INFO nova.compute.manager [-] [instance: f2eefd04-2e4d-48b2-9895-ae58070db970] Took 1.26 seconds to deallocate network for instance. [ 1243.301675] env[62627]: DEBUG oslo_concurrency.lockutils [None req-420e6c21-51cc-4f44-a5ae-47375100075f tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1243.301937] env[62627]: DEBUG oslo_concurrency.lockutils [None req-420e6c21-51cc-4f44-a5ae-47375100075f tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1243.302169] env[62627]: DEBUG nova.objects.instance [None req-420e6c21-51cc-4f44-a5ae-47375100075f tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Lazy-loading 'resources' on Instance uuid 4a4b57c8-251c-4ca8-b662-b96214110979 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1243.352200] env[62627]: DEBUG oslo_vmware.api [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5245e551-63ce-b6ac-3f02-009f191552b4, 'name': SearchDatastore_Task, 'duration_secs': 0.010132} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1243.352483] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1243.352676] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] f195662c-9493-469a-ba36-d712f9944b28/f195662c-9493-469a-ba36-d712f9944b28.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1243.352948] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-54265e50-9703-4471-971c-5d5e90326d60 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.361516] env[62627]: DEBUG oslo_vmware.api [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1243.361516] env[62627]: value = "task-2195273" [ 1243.361516] env[62627]: _type = "Task" [ 1243.361516] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.372389] env[62627]: DEBUG oslo_vmware.api [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195273, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.431586] env[62627]: DEBUG oslo_vmware.api [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195272, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.716467] env[62627]: DEBUG oslo_vmware.api [None req-ee3b6c7c-69f3-4da6-a5b3-02fe40ec76b9 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195263, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.742199] env[62627]: DEBUG oslo_concurrency.lockutils [None req-82ca3303-f204-418f-9e8f-3588ac130f28 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Acquiring lock "b282093d-8013-436d-94d2-bc26161456ea" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1243.742489] env[62627]: DEBUG oslo_concurrency.lockutils [None req-82ca3303-f204-418f-9e8f-3588ac130f28 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Lock "b282093d-8013-436d-94d2-bc26161456ea" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1243.742702] env[62627]: DEBUG oslo_concurrency.lockutils [None req-82ca3303-f204-418f-9e8f-3588ac130f28 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Acquiring lock "b282093d-8013-436d-94d2-bc26161456ea-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1243.742889] env[62627]: DEBUG oslo_concurrency.lockutils [None req-82ca3303-f204-418f-9e8f-3588ac130f28 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Lock "b282093d-8013-436d-94d2-bc26161456ea-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1243.743231] env[62627]: DEBUG oslo_concurrency.lockutils [None req-82ca3303-f204-418f-9e8f-3588ac130f28 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Lock "b282093d-8013-436d-94d2-bc26161456ea-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1243.745734] env[62627]: INFO nova.compute.manager [None req-82ca3303-f204-418f-9e8f-3588ac130f28 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Terminating instance [ 1243.798667] env[62627]: DEBUG oslo_concurrency.lockutils [None req-fce709e6-4a83-4427-b1fc-f67db27d6a15 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1243.875046] env[62627]: DEBUG oslo_vmware.api [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195273, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.933271] env[62627]: DEBUG oslo_vmware.api [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195272, 'name': PowerOnVM_Task, 'duration_secs': 0.580547} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1243.936518] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1243.936750] env[62627]: INFO nova.compute.manager [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Took 7.23 seconds to spawn the instance on the hypervisor. [ 1243.936937] env[62627]: DEBUG nova.compute.manager [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1243.938096] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18f66cff-2045-456c-ac70-b79620899fa2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.017779] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c4ab609-1d12-466f-99f9-e7a98155875d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.035495] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cdd5b42-fbe6-44ba-98a3-4a6525357059 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.074253] env[62627]: DEBUG oslo_concurrency.lockutils [None req-bd0246fa-c68d-4c55-9548-2babfce085b3 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.261s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1244.075741] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c4278fc-fffe-4cff-abdb-32e587094f10 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.091885] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36474346-608c-4d1f-abf4-48b4a35b4704 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.108379] env[62627]: DEBUG nova.compute.provider_tree [None req-420e6c21-51cc-4f44-a5ae-47375100075f tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 
tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Updating inventory in ProviderTree for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1244.217526] env[62627]: DEBUG oslo_vmware.api [None req-ee3b6c7c-69f3-4da6-a5b3-02fe40ec76b9 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195263, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.251447] env[62627]: DEBUG nova.compute.manager [None req-82ca3303-f204-418f-9e8f-3588ac130f28 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Start destroying the instance on the hypervisor. {{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1244.251447] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-82ca3303-f204-418f-9e8f-3588ac130f28 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1244.253040] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a1aad29-13fd-4d14-b000-c1528f133d6c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.263820] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-82ca3303-f204-418f-9e8f-3588ac130f28 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1244.264195] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-671cf879-ce44-404a-a344-2a4fcf6fae68 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.273537] env[62627]: DEBUG oslo_vmware.api [None req-82ca3303-f204-418f-9e8f-3588ac130f28 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Waiting for the task: (returnval){ [ 1244.273537] env[62627]: value = "task-2195274" [ 1244.273537] env[62627]: _type = "Task" [ 1244.273537] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1244.287973] env[62627]: DEBUG oslo_vmware.api [None req-82ca3303-f204-418f-9e8f-3588ac130f28 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195274, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.373943] env[62627]: DEBUG oslo_vmware.api [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195273, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.748515} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1244.374465] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] f195662c-9493-469a-ba36-d712f9944b28/f195662c-9493-469a-ba36-d712f9944b28.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1244.374465] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: f195662c-9493-469a-ba36-d712f9944b28] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1244.374797] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e27f4923-f459-4490-84a3-7e06416f7960 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.384131] env[62627]: DEBUG oslo_vmware.api [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1244.384131] env[62627]: value = "task-2195275" [ 1244.384131] env[62627]: _type = "Task" [ 1244.384131] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1244.393693] env[62627]: DEBUG oslo_vmware.api [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195275, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.464276] env[62627]: INFO nova.compute.manager [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Took 14.42 seconds to build instance. 
[ 1244.645265] env[62627]: DEBUG nova.scheduler.client.report [None req-420e6c21-51cc-4f44-a5ae-47375100075f tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Updated inventory for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with generation 144 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1244.645543] env[62627]: DEBUG nova.compute.provider_tree [None req-420e6c21-51cc-4f44-a5ae-47375100075f tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Updating resource provider c17e7f32-68d8-4e2f-8979-15d701de02f7 generation from 144 to 145 during operation: update_inventory {{(pid=62627) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1244.645726] env[62627]: DEBUG nova.compute.provider_tree [None req-420e6c21-51cc-4f44-a5ae-47375100075f tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Updating inventory in ProviderTree for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1244.721251] env[62627]: DEBUG oslo_vmware.api [None req-ee3b6c7c-69f3-4da6-a5b3-02fe40ec76b9 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195263, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.784611] env[62627]: DEBUG oslo_vmware.api [None req-82ca3303-f204-418f-9e8f-3588ac130f28 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195274, 'name': PowerOffVM_Task, 'duration_secs': 0.218857} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1244.784873] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-82ca3303-f204-418f-9e8f-3588ac130f28 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1244.785056] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-82ca3303-f204-418f-9e8f-3588ac130f28 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1244.785313] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ddc4fc85-bcf8-4048-a627-95f78431bf60 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.880512] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-82ca3303-f204-418f-9e8f-3588ac130f28 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1244.880726] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-82ca3303-f204-418f-9e8f-3588ac130f28 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1244.880913] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-82ca3303-f204-418f-9e8f-3588ac130f28 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Deleting the datastore file [datastore2] b282093d-8013-436d-94d2-bc26161456ea {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1244.881216] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7479a5fc-52e3-4efa-bd88-19f72c6aa7ee {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.889510] env[62627]: DEBUG oslo_vmware.api [None req-82ca3303-f204-418f-9e8f-3588ac130f28 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Waiting for the task: (returnval){ [ 1244.889510] env[62627]: value = "task-2195277" [ 1244.889510] env[62627]: _type = "Task" [ 1244.889510] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1244.895765] env[62627]: DEBUG oslo_vmware.api [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195275, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066755} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1244.896435] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: f195662c-9493-469a-ba36-d712f9944b28] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1244.897183] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf6ea078-3059-4453-96d1-c1d1ba89d497 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.902492] env[62627]: DEBUG oslo_vmware.api [None req-82ca3303-f204-418f-9e8f-3588ac130f28 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195277, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.922327] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: f195662c-9493-469a-ba36-d712f9944b28] Reconfiguring VM instance instance-00000068 to attach disk [datastore2] f195662c-9493-469a-ba36-d712f9944b28/f195662c-9493-469a-ba36-d712f9944b28.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1244.922896] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3e08538b-d5f7-4dec-936f-339cc3f54653 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.943015] env[62627]: DEBUG oslo_vmware.api [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1244.943015] env[62627]: value = "task-2195278" [ 1244.943015] env[62627]: _type = "Task" [ 1244.943015] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1244.951643] env[62627]: DEBUG oslo_vmware.api [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195278, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.966486] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7adf3789-ecdb-4d6c-9a3a-650ca2f658c8 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lock "1593ca50-bc97-464a-86ca-17995db15758" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.934s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1245.084551] env[62627]: DEBUG oslo_concurrency.lockutils [None req-54eb72c2-36b9-4769-acd6-7009de254d0c tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquiring lock "f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1245.084829] env[62627]: DEBUG oslo_concurrency.lockutils [None req-54eb72c2-36b9-4769-acd6-7009de254d0c tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1245.085060] env[62627]: DEBUG oslo_concurrency.lockutils [None req-54eb72c2-36b9-4769-acd6-7009de254d0c tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquiring lock "f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1245.085276] env[62627]: DEBUG oslo_concurrency.lockutils [None req-54eb72c2-36b9-4769-acd6-7009de254d0c tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1245.085468] env[62627]: DEBUG oslo_concurrency.lockutils [None req-54eb72c2-36b9-4769-acd6-7009de254d0c tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1245.087647] env[62627]: INFO nova.compute.manager [None req-54eb72c2-36b9-4769-acd6-7009de254d0c tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Terminating instance [ 1245.151312] env[62627]: DEBUG oslo_concurrency.lockutils [None req-420e6c21-51cc-4f44-a5ae-47375100075f tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.849s {{(pid=62627) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1245.154470] env[62627]: DEBUG oslo_concurrency.lockutils [None req-fce709e6-4a83-4427-b1fc-f67db27d6a15 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.356s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1245.154579] env[62627]: DEBUG nova.objects.instance [None req-fce709e6-4a83-4427-b1fc-f67db27d6a15 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Lazy-loading 'resources' on Instance uuid f2eefd04-2e4d-48b2-9895-ae58070db970 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1245.174090] env[62627]: INFO nova.scheduler.client.report [None req-420e6c21-51cc-4f44-a5ae-47375100075f tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Deleted allocations for instance 4a4b57c8-251c-4ca8-b662-b96214110979 [ 1245.219537] env[62627]: DEBUG oslo_vmware.api [None req-ee3b6c7c-69f3-4da6-a5b3-02fe40ec76b9 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195263, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.399838] env[62627]: DEBUG oslo_vmware.api [None req-82ca3303-f204-418f-9e8f-3588ac130f28 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Task: {'id': task-2195277, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.326058} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1245.400175] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-82ca3303-f204-418f-9e8f-3588ac130f28 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1245.400423] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-82ca3303-f204-418f-9e8f-3588ac130f28 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1245.400671] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-82ca3303-f204-418f-9e8f-3588ac130f28 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1245.400899] env[62627]: INFO nova.compute.manager [None req-82ca3303-f204-418f-9e8f-3588ac130f28 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] [instance: b282093d-8013-436d-94d2-bc26161456ea] Took 1.15 seconds to destroy the instance on the hypervisor. 
[ 1245.401207] env[62627]: DEBUG oslo.service.loopingcall [None req-82ca3303-f204-418f-9e8f-3588ac130f28 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1245.401734] env[62627]: DEBUG nova.compute.manager [-] [instance: b282093d-8013-436d-94d2-bc26161456ea] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1245.401883] env[62627]: DEBUG nova.network.neutron [-] [instance: b282093d-8013-436d-94d2-bc26161456ea] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1245.454339] env[62627]: DEBUG oslo_vmware.api [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195278, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.591442] env[62627]: DEBUG nova.compute.manager [None req-54eb72c2-36b9-4769-acd6-7009de254d0c tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Start destroying the instance on the hypervisor. {{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1245.591712] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-54eb72c2-36b9-4769-acd6-7009de254d0c tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1245.592736] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d180bcdd-d238-4fea-baf1-34e826c30165 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.601131] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-54eb72c2-36b9-4769-acd6-7009de254d0c tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1245.601587] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-47ca624c-baa5-46c6-98f7-1386fc9ba079 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.609166] env[62627]: DEBUG oslo_vmware.api [None req-54eb72c2-36b9-4769-acd6-7009de254d0c tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1245.609166] env[62627]: value = "task-2195279" [ 1245.609166] env[62627]: _type = "Task" [ 1245.609166] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1245.618825] env[62627]: DEBUG oslo_vmware.api [None req-54eb72c2-36b9-4769-acd6-7009de254d0c tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195279, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.686130] env[62627]: DEBUG oslo_concurrency.lockutils [None req-420e6c21-51cc-4f44-a5ae-47375100075f tempest-FloatingIPsAssociationNegativeTestJSON-1740626313 tempest-FloatingIPsAssociationNegativeTestJSON-1740626313-project-member] Lock "4a4b57c8-251c-4ca8-b662-b96214110979" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.827s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1245.721518] env[62627]: DEBUG oslo_vmware.api [None req-ee3b6c7c-69f3-4da6-a5b3-02fe40ec76b9 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195263, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.854573] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae7d4ed1-b403-414f-b6f1-f4aea372154b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.866158] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e39bf690-d0c1-4e36-8789-d5d421609888 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.873830] env[62627]: DEBUG oslo_concurrency.lockutils [None req-97ea74a8-b7d5-4b57-9ac3-87770aada023 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Acquiring lock "30db4963-095b-4297-ad7c-0606451bd2b6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1245.874597] env[62627]: DEBUG oslo_concurrency.lockutils [None req-97ea74a8-b7d5-4b57-9ac3-87770aada023 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lock "30db4963-095b-4297-ad7c-0606451bd2b6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1245.875100] env[62627]: DEBUG oslo_concurrency.lockutils [None req-97ea74a8-b7d5-4b57-9ac3-87770aada023 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Acquiring lock "30db4963-095b-4297-ad7c-0606451bd2b6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1245.875497] env[62627]: DEBUG oslo_concurrency.lockutils [None req-97ea74a8-b7d5-4b57-9ac3-87770aada023 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lock "30db4963-095b-4297-ad7c-0606451bd2b6-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1245.876091] env[62627]: DEBUG oslo_concurrency.lockutils [None req-97ea74a8-b7d5-4b57-9ac3-87770aada023 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lock "30db4963-095b-4297-ad7c-0606451bd2b6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1245.880350] env[62627]: INFO nova.compute.manager [None req-97ea74a8-b7d5-4b57-9ac3-87770aada023 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 30db4963-095b-4297-ad7c-0606451bd2b6] Terminating instance [ 1245.932012] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4391960-5e05-4fd7-9308-442d58bbd5a9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.944083] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27c94c4d-3146-4161-bd6e-1e4346a03913 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.962056] env[62627]: DEBUG oslo_vmware.api [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195278, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.970944] env[62627]: DEBUG nova.compute.provider_tree [None req-fce709e6-4a83-4427-b1fc-f67db27d6a15 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1246.003474] env[62627]: DEBUG nova.compute.manager [req-b653d152-392b-4793-975a-85e60b140442 req-1f888bef-f651-4dec-9816-baab8ea6ecc1 service nova] [instance: b282093d-8013-436d-94d2-bc26161456ea] Received event network-vif-deleted-823150e9-f3ee-44ab-a36b-c8abdca8b70e {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1246.003680] env[62627]: INFO nova.compute.manager [req-b653d152-392b-4793-975a-85e60b140442 req-1f888bef-f651-4dec-9816-baab8ea6ecc1 service nova] [instance: b282093d-8013-436d-94d2-bc26161456ea] Neutron deleted interface 823150e9-f3ee-44ab-a36b-c8abdca8b70e; detaching it from the instance and deleting it from the info cache [ 1246.003956] env[62627]: DEBUG nova.network.neutron [req-b653d152-392b-4793-975a-85e60b140442 req-1f888bef-f651-4dec-9816-baab8ea6ecc1 service nova] [instance: b282093d-8013-436d-94d2-bc26161456ea] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1246.119999] env[62627]: DEBUG oslo_vmware.api [None req-54eb72c2-36b9-4769-acd6-7009de254d0c tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195279, 'name': PowerOffVM_Task, 'duration_secs': 
0.362089} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1246.120385] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-54eb72c2-36b9-4769-acd6-7009de254d0c tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1246.120607] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-54eb72c2-36b9-4769-acd6-7009de254d0c tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1246.120921] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b6888bab-02f2-4e75-aec4-c052ebdb3882 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.194448] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-54eb72c2-36b9-4769-acd6-7009de254d0c tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1246.194758] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-54eb72c2-36b9-4769-acd6-7009de254d0c tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1246.195017] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-54eb72c2-36b9-4769-acd6-7009de254d0c tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Deleting the datastore file [datastore2] f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1246.195336] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5a4e490a-4dfe-408a-8ba4-774baf7e7442 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.203964] env[62627]: DEBUG oslo_vmware.api [None req-54eb72c2-36b9-4769-acd6-7009de254d0c tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1246.203964] env[62627]: value = "task-2195281" [ 1246.203964] env[62627]: _type = "Task" [ 1246.203964] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1246.215792] env[62627]: DEBUG oslo_vmware.api [None req-54eb72c2-36b9-4769-acd6-7009de254d0c tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195281, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.221942] env[62627]: DEBUG oslo_vmware.api [None req-ee3b6c7c-69f3-4da6-a5b3-02fe40ec76b9 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195263, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.389449] env[62627]: DEBUG nova.compute.manager [None req-97ea74a8-b7d5-4b57-9ac3-87770aada023 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 30db4963-095b-4297-ad7c-0606451bd2b6] Start destroying the instance on the hypervisor. {{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1246.389449] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-97ea74a8-b7d5-4b57-9ac3-87770aada023 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 30db4963-095b-4297-ad7c-0606451bd2b6] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1246.389449] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57e858da-a071-4f80-9b1e-98be3af6a85c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.395802] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-97ea74a8-b7d5-4b57-9ac3-87770aada023 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 30db4963-095b-4297-ad7c-0606451bd2b6] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1246.396121] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0eb50c14-1715-4c13-b8ce-b007259ad316 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.403478] env[62627]: DEBUG oslo_vmware.api [None req-97ea74a8-b7d5-4b57-9ac3-87770aada023 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Waiting for the task: (returnval){ [ 1246.403478] env[62627]: value = "task-2195282" [ 1246.403478] env[62627]: _type = "Task" [ 1246.403478] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1246.412869] env[62627]: DEBUG oslo_vmware.api [None req-97ea74a8-b7d5-4b57-9ac3-87770aada023 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195282, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.461786] env[62627]: DEBUG oslo_vmware.api [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195278, 'name': ReconfigVM_Task, 'duration_secs': 1.041734} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1246.462083] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: f195662c-9493-469a-ba36-d712f9944b28] Reconfigured VM instance instance-00000068 to attach disk [datastore2] f195662c-9493-469a-ba36-d712f9944b28/f195662c-9493-469a-ba36-d712f9944b28.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1246.463170] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fec94ee5-a4ff-4ef7-adfb-1f3789d5cf27 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.471300] env[62627]: DEBUG oslo_vmware.api [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1246.471300] env[62627]: value = "task-2195283" [ 1246.471300] env[62627]: _type = "Task" [ 1246.471300] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1246.475273] env[62627]: DEBUG nova.scheduler.client.report [None req-fce709e6-4a83-4427-b1fc-f67db27d6a15 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1246.484831] env[62627]: DEBUG nova.network.neutron [-] [instance: b282093d-8013-436d-94d2-bc26161456ea] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1246.486039] env[62627]: DEBUG oslo_vmware.api [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195283, 'name': Rename_Task} progress is 5%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.506783] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ea1088bf-7330-42e1-9e2b-1bbe3aba7627 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.519912] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b57d4bf4-63a5-4018-bc92-32e3076951bd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.553768] env[62627]: DEBUG nova.compute.manager [req-b653d152-392b-4793-975a-85e60b140442 req-1f888bef-f651-4dec-9816-baab8ea6ecc1 service nova] [instance: b282093d-8013-436d-94d2-bc26161456ea] Detach interface failed, port_id=823150e9-f3ee-44ab-a36b-c8abdca8b70e, reason: Instance b282093d-8013-436d-94d2-bc26161456ea could not be found. {{(pid=62627) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1246.717205] env[62627]: DEBUG oslo_vmware.api [None req-54eb72c2-36b9-4769-acd6-7009de254d0c tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195281, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.164324} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1246.717205] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-54eb72c2-36b9-4769-acd6-7009de254d0c tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1246.717205] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-54eb72c2-36b9-4769-acd6-7009de254d0c tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1246.717205] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-54eb72c2-36b9-4769-acd6-7009de254d0c tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1246.717205] env[62627]: INFO nova.compute.manager [None req-54eb72c2-36b9-4769-acd6-7009de254d0c tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1246.717205] env[62627]: DEBUG oslo.service.loopingcall [None req-54eb72c2-36b9-4769-acd6-7009de254d0c tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1246.717205] env[62627]: DEBUG nova.compute.manager [-] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1246.717205] env[62627]: DEBUG nova.network.neutron [-] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1246.722046] env[62627]: DEBUG oslo_vmware.api [None req-ee3b6c7c-69f3-4da6-a5b3-02fe40ec76b9 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195263, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.784297] env[62627]: INFO nova.compute.manager [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Rebuilding instance [ 1246.837032] env[62627]: DEBUG nova.compute.manager [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1246.837819] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2302c460-27d8-4428-bb25-75fbe4a31984 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.914481] env[62627]: DEBUG oslo_vmware.api [None req-97ea74a8-b7d5-4b57-9ac3-87770aada023 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195282, 'name': PowerOffVM_Task, 'duration_secs': 0.191111} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1246.915357] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-97ea74a8-b7d5-4b57-9ac3-87770aada023 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 30db4963-095b-4297-ad7c-0606451bd2b6] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1246.915357] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-97ea74a8-b7d5-4b57-9ac3-87770aada023 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 30db4963-095b-4297-ad7c-0606451bd2b6] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1246.915546] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b4dca584-7d00-48bb-b5b8-f093015e25ff {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.977686] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-97ea74a8-b7d5-4b57-9ac3-87770aada023 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 30db4963-095b-4297-ad7c-0606451bd2b6] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1246.977910] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-97ea74a8-b7d5-4b57-9ac3-87770aada023 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 30db4963-095b-4297-ad7c-0606451bd2b6] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1246.978681] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-97ea74a8-b7d5-4b57-9ac3-87770aada023 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Deleting the datastore file [datastore2] 30db4963-095b-4297-ad7c-0606451bd2b6 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1246.979503] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8be217ab-c17e-412e-9f84-cf37fd16e8ca {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.982329] env[62627]: DEBUG oslo_concurrency.lockutils [None req-fce709e6-4a83-4427-b1fc-f67db27d6a15 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.828s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1246.989248] env[62627]: INFO nova.compute.manager [-] [instance: b282093d-8013-436d-94d2-bc26161456ea] Took 1.59 seconds to deallocate network for instance. [ 1246.989248] env[62627]: DEBUG oslo_vmware.api [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195283, 'name': Rename_Task} progress is 14%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.993874] env[62627]: DEBUG oslo_vmware.api [None req-97ea74a8-b7d5-4b57-9ac3-87770aada023 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Waiting for the task: (returnval){ [ 1246.993874] env[62627]: value = "task-2195285" [ 1246.993874] env[62627]: _type = "Task" [ 1246.993874] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1247.002290] env[62627]: DEBUG oslo_vmware.api [None req-97ea74a8-b7d5-4b57-9ac3-87770aada023 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195285, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.032588] env[62627]: INFO nova.scheduler.client.report [None req-fce709e6-4a83-4427-b1fc-f67db27d6a15 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Deleted allocations for instance f2eefd04-2e4d-48b2-9895-ae58070db970 [ 1247.222523] env[62627]: DEBUG oslo_vmware.api [None req-ee3b6c7c-69f3-4da6-a5b3-02fe40ec76b9 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195263, 'name': ReconfigVM_Task, 'duration_secs': 5.792648} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1247.223738] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ee3b6c7c-69f3-4da6-a5b3-02fe40ec76b9 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Releasing lock "c7e3246d-ffe7-4d81-a2ec-c5878649ed6b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1247.223738] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ee3b6c7c-69f3-4da6-a5b3-02fe40ec76b9 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Reconfigured VM to detach interface {{(pid=62627) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1247.485057] env[62627]: DEBUG oslo_vmware.api [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195283, 'name': Rename_Task} progress is 14%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.495498] env[62627]: DEBUG oslo_concurrency.lockutils [None req-82ca3303-f204-418f-9e8f-3588ac130f28 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1247.495771] env[62627]: DEBUG oslo_concurrency.lockutils [None req-82ca3303-f204-418f-9e8f-3588ac130f28 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1247.496010] env[62627]: DEBUG nova.objects.instance [None req-82ca3303-f204-418f-9e8f-3588ac130f28 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Lazy-loading 'resources' on Instance uuid b282093d-8013-436d-94d2-bc26161456ea {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1247.513010] env[62627]: DEBUG oslo_vmware.api [None req-97ea74a8-b7d5-4b57-9ac3-87770aada023 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195285, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.14392} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1247.514619] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-97ea74a8-b7d5-4b57-9ac3-87770aada023 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1247.514856] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-97ea74a8-b7d5-4b57-9ac3-87770aada023 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 30db4963-095b-4297-ad7c-0606451bd2b6] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1247.515079] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-97ea74a8-b7d5-4b57-9ac3-87770aada023 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 30db4963-095b-4297-ad7c-0606451bd2b6] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1247.515260] env[62627]: INFO nova.compute.manager [None req-97ea74a8-b7d5-4b57-9ac3-87770aada023 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 30db4963-095b-4297-ad7c-0606451bd2b6] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1247.515498] env[62627]: DEBUG oslo.service.loopingcall [None req-97ea74a8-b7d5-4b57-9ac3-87770aada023 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1247.515701] env[62627]: DEBUG nova.compute.manager [-] [instance: 30db4963-095b-4297-ad7c-0606451bd2b6] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1247.515799] env[62627]: DEBUG nova.network.neutron [-] [instance: 30db4963-095b-4297-ad7c-0606451bd2b6] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1247.548742] env[62627]: DEBUG oslo_concurrency.lockutils [None req-fce709e6-4a83-4427-b1fc-f67db27d6a15 tempest-ServerPasswordTestJSON-887539007 tempest-ServerPasswordTestJSON-887539007-project-member] Lock "f2eefd04-2e4d-48b2-9895-ae58070db970" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.180s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1247.675051] env[62627]: DEBUG nova.network.neutron [-] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1247.852660] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1247.852975] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a17d673f-14db-4cd1-8a5c-9d58c9885d48 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.862141] env[62627]: DEBUG oslo_vmware.api [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 1247.862141] env[62627]: value = "task-2195286" [ 1247.862141] env[62627]: _type = "Task" [ 1247.862141] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1247.872115] env[62627]: DEBUG oslo_vmware.api [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195286, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.983469] env[62627]: DEBUG oslo_vmware.api [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195283, 'name': Rename_Task} progress is 14%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.046778] env[62627]: DEBUG nova.compute.manager [req-a2272498-a91f-4e1e-8b03-e06d5c92061b req-ca21f1a0-08c3-4da8-ab5f-b957dbc95dd3 service nova] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Received event network-changed-57af03b0-8d4d-4f61-a384-800617af9be4 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1248.047511] env[62627]: DEBUG nova.compute.manager [req-a2272498-a91f-4e1e-8b03-e06d5c92061b req-ca21f1a0-08c3-4da8-ab5f-b957dbc95dd3 service nova] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Refreshing instance network info cache due to event network-changed-57af03b0-8d4d-4f61-a384-800617af9be4. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1248.047511] env[62627]: DEBUG oslo_concurrency.lockutils [req-a2272498-a91f-4e1e-8b03-e06d5c92061b req-ca21f1a0-08c3-4da8-ab5f-b957dbc95dd3 service nova] Acquiring lock "refresh_cache-1593ca50-bc97-464a-86ca-17995db15758" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1248.047511] env[62627]: DEBUG oslo_concurrency.lockutils [req-a2272498-a91f-4e1e-8b03-e06d5c92061b req-ca21f1a0-08c3-4da8-ab5f-b957dbc95dd3 service nova] Acquired lock "refresh_cache-1593ca50-bc97-464a-86ca-17995db15758" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1248.047511] env[62627]: DEBUG nova.network.neutron [req-a2272498-a91f-4e1e-8b03-e06d5c92061b req-ca21f1a0-08c3-4da8-ab5f-b957dbc95dd3 service nova] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Refreshing network info cache for port 57af03b0-8d4d-4f61-a384-800617af9be4 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1248.177724] env[62627]: INFO nova.compute.manager [-] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Took 1.46 seconds to deallocate network for instance. 
[ 1248.186140] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-586357c2-5b91-4d55-bd41-378d96c0fc82 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.192611] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cf56fe4-c337-4038-919b-c753249afe0d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.228717] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e86319b-4605-4c7d-b21c-5a10781f67c1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.236575] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b426b082-3510-4f1a-abc3-739fdc197994 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.253557] env[62627]: DEBUG nova.compute.provider_tree [None req-82ca3303-f204-418f-9e8f-3588ac130f28 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Updating inventory in ProviderTree for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1248.372970] env[62627]: DEBUG oslo_vmware.api [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195286, 'name': PowerOffVM_Task, 'duration_secs': 0.234091} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1248.373276] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1248.373508] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1248.374327] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa00af2f-517d-4512-bb36-a8992bb2bc1c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.382017] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1248.382297] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-928aecb7-0218-4f12-9a45-f9b95ab063f6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.438819] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1248.439067] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1248.439547] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Deleting the datastore file [datastore2] b68042fd-4955-41e4-b14f-1f8268ace377 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1248.439614] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e861ab09-7a1e-4bc4-81ce-1a1417d08153 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.446443] env[62627]: DEBUG oslo_vmware.api [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 1248.446443] env[62627]: value = "task-2195288" [ 1248.446443] env[62627]: _type = "Task" [ 1248.446443] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.455355] env[62627]: DEBUG oslo_vmware.api [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195288, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.485411] env[62627]: DEBUG oslo_vmware.api [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195283, 'name': Rename_Task, 'duration_secs': 1.824853} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1248.485726] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: f195662c-9493-469a-ba36-d712f9944b28] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1248.485977] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1d4152f6-3686-4eb0-8ecd-06c9a9b8d109 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.492941] env[62627]: DEBUG oslo_vmware.api [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1248.492941] env[62627]: value = "task-2195289" [ 1248.492941] env[62627]: _type = "Task" [ 1248.492941] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.504252] env[62627]: DEBUG oslo_vmware.api [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195289, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.523595] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ee3b6c7c-69f3-4da6-a5b3-02fe40ec76b9 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquiring lock "refresh_cache-c7e3246d-ffe7-4d81-a2ec-c5878649ed6b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1248.524245] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ee3b6c7c-69f3-4da6-a5b3-02fe40ec76b9 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquired lock "refresh_cache-c7e3246d-ffe7-4d81-a2ec-c5878649ed6b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1248.524585] env[62627]: DEBUG nova.network.neutron [None req-ee3b6c7c-69f3-4da6-a5b3-02fe40ec76b9 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1248.685992] env[62627]: DEBUG oslo_concurrency.lockutils [None req-54eb72c2-36b9-4769-acd6-7009de254d0c tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1248.801806] env[62627]: DEBUG nova.scheduler.client.report [None req-82ca3303-f204-418f-9e8f-3588ac130f28 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Updated inventory for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with generation 145 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1248.802089] env[62627]: DEBUG nova.compute.provider_tree [None req-82ca3303-f204-418f-9e8f-3588ac130f28 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Updating resource provider c17e7f32-68d8-4e2f-8979-15d701de02f7 generation from 145 to 146 during operation: update_inventory {{(pid=62627) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1248.802267] env[62627]: DEBUG nova.compute.provider_tree [None req-82ca3303-f204-418f-9e8f-3588ac130f28 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Updating inventory in ProviderTree for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1248.947297] env[62627]: DEBUG nova.compute.manager [req-cb3cb467-b276-4f1f-ada7-59049f1d9fe9 req-ee5090d3-1ff7-4cdd-ac06-a81be4ee30ca service nova] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Received event network-changed-58815871-fdac-4406-a61e-4d7fca125122 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1248.947297] env[62627]: DEBUG nova.compute.manager [req-cb3cb467-b276-4f1f-ada7-59049f1d9fe9 req-ee5090d3-1ff7-4cdd-ac06-a81be4ee30ca service nova] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Refreshing instance network info cache due to event network-changed-58815871-fdac-4406-a61e-4d7fca125122. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1248.947297] env[62627]: DEBUG oslo_concurrency.lockutils [req-cb3cb467-b276-4f1f-ada7-59049f1d9fe9 req-ee5090d3-1ff7-4cdd-ac06-a81be4ee30ca service nova] Acquiring lock "refresh_cache-c7e3246d-ffe7-4d81-a2ec-c5878649ed6b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1248.961797] env[62627]: DEBUG oslo_vmware.api [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195288, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.136718} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1248.962247] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1248.962877] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1248.963207] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1248.971331] env[62627]: DEBUG nova.network.neutron [req-a2272498-a91f-4e1e-8b03-e06d5c92061b req-ca21f1a0-08c3-4da8-ab5f-b957dbc95dd3 service nova] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Updated VIF entry in instance network info cache for port 57af03b0-8d4d-4f61-a384-800617af9be4. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1248.971331] env[62627]: DEBUG nova.network.neutron [req-a2272498-a91f-4e1e-8b03-e06d5c92061b req-ca21f1a0-08c3-4da8-ab5f-b957dbc95dd3 service nova] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Updating instance_info_cache with network_info: [{"id": "57af03b0-8d4d-4f61-a384-800617af9be4", "address": "fa:16:3e:02:a2:5f", "network": {"id": "e5d8177b-3afb-46cb-88d9-d4777ffa4a39", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1184692155-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.160", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f3a36c557145e38c528a628b1f3b67", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a44713-0af1-486e-bc0d-00e03a769fa4", "external-id": "nsx-vlan-transportzone-420", "segmentation_id": 420, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57af03b0-8d", "ovs_interfaceid": "57af03b0-8d4d-4f61-a384-800617af9be4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1249.004852] env[62627]: DEBUG oslo_vmware.api [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195289, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.011267] env[62627]: DEBUG nova.network.neutron [-] [instance: 30db4963-095b-4297-ad7c-0606451bd2b6] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1249.311694] env[62627]: DEBUG oslo_concurrency.lockutils [None req-82ca3303-f204-418f-9e8f-3588ac130f28 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.816s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1249.314819] env[62627]: DEBUG oslo_concurrency.lockutils [None req-54eb72c2-36b9-4769-acd6-7009de254d0c tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.629s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1249.314819] env[62627]: DEBUG nova.objects.instance [None req-54eb72c2-36b9-4769-acd6-7009de254d0c tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lazy-loading 'resources' on Instance uuid f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1249.344988] env[62627]: INFO nova.scheduler.client.report [None req-82ca3303-f204-418f-9e8f-3588ac130f28 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Deleted allocations for instance b282093d-8013-436d-94d2-bc26161456ea [ 1249.393471] env[62627]: INFO nova.network.neutron [None req-ee3b6c7c-69f3-4da6-a5b3-02fe40ec76b9 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Port 1034439a-d24f-4eb6-8139-67b012254146 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 1249.393850] env[62627]: DEBUG nova.network.neutron [None req-ee3b6c7c-69f3-4da6-a5b3-02fe40ec76b9 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Updating instance_info_cache with network_info: [{"id": "58815871-fdac-4406-a61e-4d7fca125122", "address": "fa:16:3e:69:fc:43", "network": {"id": "a1c5c227-8f1f-4881-a6d8-f446b64d9846", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1052922661-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e4f7ad3ebcf4c4d97bdbce58eec5a09", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "023d6500-887e-4dc4-bec5-06b40450d9c0", "external-id": "nsx-vlan-transportzone-108", "segmentation_id": 108, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58815871-fd", "ovs_interfaceid": "58815871-fdac-4406-a61e-4d7fca125122", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1249.472474] env[62627]: DEBUG oslo_concurrency.lockutils [req-a2272498-a91f-4e1e-8b03-e06d5c92061b req-ca21f1a0-08c3-4da8-ab5f-b957dbc95dd3 service nova] Releasing lock "refresh_cache-1593ca50-bc97-464a-86ca-17995db15758" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1249.473150] env[62627]: DEBUG nova.compute.manager [req-a2272498-a91f-4e1e-8b03-e06d5c92061b req-ca21f1a0-08c3-4da8-ab5f-b957dbc95dd3 service nova] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Received event network-vif-deleted-291e9d98-ac72-43f7-b745-9f0d290580fa {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1249.506834] env[62627]: DEBUG oslo_vmware.api [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195289, 'name': PowerOnVM_Task, 'duration_secs': 0.892187} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.507290] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: f195662c-9493-469a-ba36-d712f9944b28] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1249.507732] env[62627]: INFO nova.compute.manager [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: f195662c-9493-469a-ba36-d712f9944b28] Took 10.91 seconds to spawn the instance on the hypervisor. 
[ 1249.508186] env[62627]: DEBUG nova.compute.manager [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: f195662c-9493-469a-ba36-d712f9944b28] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1249.510376] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2133a2e2-236a-4dae-88d4-0d87bfdfb08f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.516621] env[62627]: INFO nova.compute.manager [-] [instance: 30db4963-095b-4297-ad7c-0606451bd2b6] Took 2.00 seconds to deallocate network for instance. [ 1249.710524] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4bb9e2e3-f9b6-4535-aff7-0965e5b576c8 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquiring lock "interface-1109aae0-62d2-4c55-8c68-f224f3de8b55-1034439a-d24f-4eb6-8139-67b012254146" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1249.710808] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4bb9e2e3-f9b6-4535-aff7-0965e5b576c8 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lock "interface-1109aae0-62d2-4c55-8c68-f224f3de8b55-1034439a-d24f-4eb6-8139-67b012254146" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1249.711201] env[62627]: DEBUG nova.objects.instance [None req-4bb9e2e3-f9b6-4535-aff7-0965e5b576c8 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lazy-loading 'flavor' on Instance uuid 1109aae0-62d2-4c55-8c68-f224f3de8b55 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1249.856160] env[62627]: DEBUG oslo_concurrency.lockutils [None req-82ca3303-f204-418f-9e8f-3588ac130f28 tempest-ServersNegativeTestJSON-470999703 tempest-ServersNegativeTestJSON-470999703-project-member] Lock "b282093d-8013-436d-94d2-bc26161456ea" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.113s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1249.899903] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ee3b6c7c-69f3-4da6-a5b3-02fe40ec76b9 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Releasing lock "refresh_cache-c7e3246d-ffe7-4d81-a2ec-c5878649ed6b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1249.902137] env[62627]: DEBUG oslo_concurrency.lockutils [req-cb3cb467-b276-4f1f-ada7-59049f1d9fe9 req-ee5090d3-1ff7-4cdd-ac06-a81be4ee30ca service nova] Acquired lock "refresh_cache-c7e3246d-ffe7-4d81-a2ec-c5878649ed6b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1249.902748] env[62627]: DEBUG nova.network.neutron [req-cb3cb467-b276-4f1f-ada7-59049f1d9fe9 req-ee5090d3-1ff7-4cdd-ac06-a81be4ee30ca service nova] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Refreshing network info cache for 
port 58815871-fdac-4406-a61e-4d7fca125122 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1250.006920] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3cb3c78-4285-4a31-934d-b17603ba78f8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.012379] env[62627]: DEBUG nova.virt.hardware [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1250.012608] env[62627]: DEBUG nova.virt.hardware [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1250.012763] env[62627]: DEBUG nova.virt.hardware [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1250.012942] env[62627]: DEBUG nova.virt.hardware [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1250.013100] env[62627]: DEBUG nova.virt.hardware [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1250.013258] env[62627]: DEBUG nova.virt.hardware [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1250.013727] env[62627]: DEBUG nova.virt.hardware [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1250.013924] env[62627]: DEBUG nova.virt.hardware [None 
req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1250.014119] env[62627]: DEBUG nova.virt.hardware [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1250.014586] env[62627]: DEBUG nova.virt.hardware [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1250.014713] env[62627]: DEBUG nova.virt.hardware [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1250.015528] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca19dfce-349a-41d0-9a2f-e9ad692dbf87 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.025346] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ec553ec-f04f-4010-acc0-fd74d5b66864 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.029377] env[62627]: DEBUG oslo_concurrency.lockutils [None req-97ea74a8-b7d5-4b57-9ac3-87770aada023 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1250.031836] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-034c9f1a-cfc2-4999-8e75-264a3b3e4f03 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.072455] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f89f83d-9410-4b05-ad1c-3096c7f32a0f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.085142] env[62627]: INFO nova.compute.manager [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: f195662c-9493-469a-ba36-d712f9944b28] Took 17.56 seconds to build instance. 
[ 1250.087121] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:56:2b:23', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c6eaa481-1f92-4851-b98e-09ed0daad7cb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9a79e592-1487-4b96-93f2-66df8b0224a4', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1250.095747] env[62627]: DEBUG oslo.service.loopingcall [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1250.096687] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1250.096914] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a32e8df8-60d2-47a4-86d3-ab2b4d0fd609 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.117125] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ec427b6-bf2c-4c80-bf2d-e1cee93df930 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.122514] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1250.122514] env[62627]: value = "task-2195290" [ 1250.122514] env[62627]: _type = "Task" [ 1250.122514] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1250.134776] env[62627]: DEBUG nova.compute.provider_tree [None req-54eb72c2-36b9-4769-acd6-7009de254d0c tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1250.142476] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195290, 'name': CreateVM_Task} progress is 6%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.372259] env[62627]: DEBUG nova.compute.manager [req-fe786bef-7c9f-4873-9e0a-9f093a3f69b7 req-b2a1946a-a6f6-41ba-87da-34fceb42059d service nova] [instance: 30db4963-095b-4297-ad7c-0606451bd2b6] Received event network-vif-deleted-c9e055e0-0774-4741-a16c-139233d6803d {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1250.402624] env[62627]: DEBUG nova.objects.instance [None req-4bb9e2e3-f9b6-4535-aff7-0965e5b576c8 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lazy-loading 'pci_requests' on Instance uuid 1109aae0-62d2-4c55-8c68-f224f3de8b55 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1250.405613] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ee3b6c7c-69f3-4da6-a5b3-02fe40ec76b9 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lock "interface-c7e3246d-ffe7-4d81-a2ec-c5878649ed6b-1034439a-d24f-4eb6-8139-67b012254146" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.785s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1250.597777] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ec10b454-e2d5-4fd9-a471-113226bcd8b9 tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "f195662c-9493-469a-ba36-d712f9944b28" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.082s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1250.634288] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195290, 'name': CreateVM_Task, 'duration_secs': 0.381442} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1250.634467] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1250.635145] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1250.635318] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1250.635632] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1250.635889] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ca420f23-ed5c-45f2-a563-a5ac6e360fc5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.639448] env[62627]: DEBUG nova.scheduler.client.report [None req-54eb72c2-36b9-4769-acd6-7009de254d0c tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1250.650654] env[62627]: DEBUG oslo_vmware.api [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 1250.650654] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52475f6d-44e7-b274-ac0a-7bdd644fa747" [ 1250.650654] env[62627]: _type = "Task" [ 1250.650654] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1250.663578] env[62627]: DEBUG oslo_vmware.api [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52475f6d-44e7-b274-ac0a-7bdd644fa747, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.736818] env[62627]: DEBUG nova.network.neutron [req-cb3cb467-b276-4f1f-ada7-59049f1d9fe9 req-ee5090d3-1ff7-4cdd-ac06-a81be4ee30ca service nova] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Updated VIF entry in instance network info cache for port 58815871-fdac-4406-a61e-4d7fca125122. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1250.737211] env[62627]: DEBUG nova.network.neutron [req-cb3cb467-b276-4f1f-ada7-59049f1d9fe9 req-ee5090d3-1ff7-4cdd-ac06-a81be4ee30ca service nova] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Updating instance_info_cache with network_info: [{"id": "58815871-fdac-4406-a61e-4d7fca125122", "address": "fa:16:3e:69:fc:43", "network": {"id": "a1c5c227-8f1f-4881-a6d8-f446b64d9846", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1052922661-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e4f7ad3ebcf4c4d97bdbce58eec5a09", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "023d6500-887e-4dc4-bec5-06b40450d9c0", "external-id": "nsx-vlan-transportzone-108", "segmentation_id": 108, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58815871-fd", "ovs_interfaceid": "58815871-fdac-4406-a61e-4d7fca125122", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1250.906069] env[62627]: DEBUG nova.objects.base [None req-4bb9e2e3-f9b6-4535-aff7-0965e5b576c8 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Object Instance<1109aae0-62d2-4c55-8c68-f224f3de8b55> lazy-loaded attributes: flavor,pci_requests {{(pid=62627) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1250.906872] env[62627]: DEBUG nova.network.neutron [None req-4bb9e2e3-f9b6-4535-aff7-0965e5b576c8 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1251.026949] env[62627]: DEBUG nova.policy [None req-4bb9e2e3-f9b6-4535-aff7-0965e5b576c8 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3c5e5eee141e496db2d2ea316c43e6be', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8e4f7ad3ebcf4c4d97bdbce58eec5a09', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 1251.149293] env[62627]: DEBUG oslo_concurrency.lockutils [None 
req-54eb72c2-36b9-4769-acd6-7009de254d0c tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.835s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1251.152326] env[62627]: DEBUG oslo_concurrency.lockutils [None req-97ea74a8-b7d5-4b57-9ac3-87770aada023 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.123s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1251.152326] env[62627]: DEBUG nova.objects.instance [None req-97ea74a8-b7d5-4b57-9ac3-87770aada023 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lazy-loading 'resources' on Instance uuid 30db4963-095b-4297-ad7c-0606451bd2b6 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1251.167960] env[62627]: DEBUG oslo_vmware.api [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52475f6d-44e7-b274-ac0a-7bdd644fa747, 'name': SearchDatastore_Task, 'duration_secs': 0.009687} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1251.167960] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1251.167960] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1251.167960] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1251.168253] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1251.168283] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Creating directory with path 
[datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1251.168549] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e26a5212-b194-4bdd-af8d-a2fa1eb53714 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.180303] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1251.180575] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1251.181371] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e793aa0a-c5e8-4844-8e70-6a82e018f51d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.189768] env[62627]: DEBUG oslo_vmware.api [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 1251.189768] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]523a94d9-e22b-ce6a-9a41-59863380a045" [ 1251.189768] env[62627]: _type = "Task" [ 1251.189768] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1251.190805] env[62627]: INFO nova.scheduler.client.report [None req-54eb72c2-36b9-4769-acd6-7009de254d0c tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Deleted allocations for instance f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1 [ 1251.205441] env[62627]: DEBUG oslo_vmware.api [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]523a94d9-e22b-ce6a-9a41-59863380a045, 'name': SearchDatastore_Task, 'duration_secs': 0.010718} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1251.206438] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4472e93e-b915-42df-b5b4-28ef53ca7d25 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.215570] env[62627]: DEBUG oslo_vmware.api [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 1251.215570] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5207e87f-907a-108c-620b-bdbe817afe9e" [ 1251.215570] env[62627]: _type = "Task" [ 1251.215570] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1251.217248] env[62627]: DEBUG nova.compute.manager [req-fd448f03-fbcf-4743-b4ba-cca251160808 req-3bfdfc46-11e4-4181-ad51-5af12a0e47e0 service nova] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Received event network-changed-3166f86b-c852-4335-9c18-b4c68f51d8ab {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1251.217405] env[62627]: DEBUG nova.compute.manager [req-fd448f03-fbcf-4743-b4ba-cca251160808 req-3bfdfc46-11e4-4181-ad51-5af12a0e47e0 service nova] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Refreshing instance network info cache due to event network-changed-3166f86b-c852-4335-9c18-b4c68f51d8ab. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1251.217629] env[62627]: DEBUG oslo_concurrency.lockutils [req-fd448f03-fbcf-4743-b4ba-cca251160808 req-3bfdfc46-11e4-4181-ad51-5af12a0e47e0 service nova] Acquiring lock "refresh_cache-1109aae0-62d2-4c55-8c68-f224f3de8b55" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1251.217770] env[62627]: DEBUG oslo_concurrency.lockutils [req-fd448f03-fbcf-4743-b4ba-cca251160808 req-3bfdfc46-11e4-4181-ad51-5af12a0e47e0 service nova] Acquired lock "refresh_cache-1109aae0-62d2-4c55-8c68-f224f3de8b55" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1251.217928] env[62627]: DEBUG nova.network.neutron [req-fd448f03-fbcf-4743-b4ba-cca251160808 req-3bfdfc46-11e4-4181-ad51-5af12a0e47e0 service nova] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Refreshing network info cache for port 3166f86b-c852-4335-9c18-b4c68f51d8ab {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1251.228312] env[62627]: DEBUG oslo_vmware.api [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5207e87f-907a-108c-620b-bdbe817afe9e, 'name': SearchDatastore_Task, 'duration_secs': 0.008835} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1251.229075] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1251.229360] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] b68042fd-4955-41e4-b14f-1f8268ace377/b68042fd-4955-41e4-b14f-1f8268ace377.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1251.229615] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7ca986da-91f3-4b7d-97c7-1ba2c0bb97aa {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.236702] env[62627]: DEBUG oslo_vmware.api [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 1251.236702] env[62627]: value = "task-2195291" [ 1251.236702] env[62627]: _type = "Task" [ 1251.236702] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1251.241164] env[62627]: DEBUG oslo_concurrency.lockutils [req-cb3cb467-b276-4f1f-ada7-59049f1d9fe9 req-ee5090d3-1ff7-4cdd-ac06-a81be4ee30ca service nova] Releasing lock "refresh_cache-c7e3246d-ffe7-4d81-a2ec-c5878649ed6b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1251.247427] env[62627]: DEBUG oslo_vmware.api [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195291, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.296864] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1251.703728] env[62627]: DEBUG oslo_concurrency.lockutils [None req-54eb72c2-36b9-4769-acd6-7009de254d0c tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.619s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1251.748259] env[62627]: DEBUG oslo_vmware.api [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195291, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.505386} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1251.748588] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] b68042fd-4955-41e4-b14f-1f8268ace377/b68042fd-4955-41e4-b14f-1f8268ace377.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1251.748812] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1251.749087] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-877a1fe9-4ce9-46db-a2e4-918da92e64ae {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.758786] env[62627]: DEBUG oslo_vmware.api [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 1251.758786] env[62627]: value = "task-2195292" [ 1251.758786] env[62627]: _type = "Task" [ 1251.758786] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1251.769096] env[62627]: DEBUG oslo_vmware.api [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195292, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.845461] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc8c12bb-f092-4d6e-bc63-07c41221c138 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.860101] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fcff3c0-0f01-4a60-996b-ce59afb779f5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.900020] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7854c8f-4d56-4728-893f-37952f25b2fd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.907217] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3ae02d7-a4f5-4345-81cb-1765bc48a37a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.921939] env[62627]: DEBUG nova.compute.provider_tree [None req-97ea74a8-b7d5-4b57-9ac3-87770aada023 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1252.035021] env[62627]: DEBUG nova.network.neutron [req-fd448f03-fbcf-4743-b4ba-cca251160808 req-3bfdfc46-11e4-4181-ad51-5af12a0e47e0 service nova] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Updated VIF entry in instance network info cache for port 3166f86b-c852-4335-9c18-b4c68f51d8ab. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1252.035021] env[62627]: DEBUG nova.network.neutron [req-fd448f03-fbcf-4743-b4ba-cca251160808 req-3bfdfc46-11e4-4181-ad51-5af12a0e47e0 service nova] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Updating instance_info_cache with network_info: [{"id": "3166f86b-c852-4335-9c18-b4c68f51d8ab", "address": "fa:16:3e:7b:bd:70", "network": {"id": "a1c5c227-8f1f-4881-a6d8-f446b64d9846", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1052922661-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.196", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e4f7ad3ebcf4c4d97bdbce58eec5a09", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "023d6500-887e-4dc4-bec5-06b40450d9c0", "external-id": "nsx-vlan-transportzone-108", "segmentation_id": 108, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3166f86b-c8", "ovs_interfaceid": "3166f86b-c852-4335-9c18-b4c68f51d8ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1252.269512] env[62627]: DEBUG oslo_vmware.api [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195292, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069019} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1252.269793] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1252.270677] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8bafbed-4888-489b-9eb4-63fa11b349fd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.294298] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Reconfiguring VM instance instance-00000063 to attach disk [datastore2] b68042fd-4955-41e4-b14f-1f8268ace377/b68042fd-4955-41e4-b14f-1f8268ace377.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1252.294617] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6f6587e2-43d0-453a-b156-653bf8ecbf82 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.315719] env[62627]: DEBUG oslo_vmware.api [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 1252.315719] env[62627]: value = "task-2195293" [ 1252.315719] env[62627]: _type = "Task" [ 1252.315719] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1252.325251] env[62627]: DEBUG oslo_vmware.api [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195293, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.426787] env[62627]: DEBUG nova.scheduler.client.report [None req-97ea74a8-b7d5-4b57-9ac3-87770aada023 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1252.538885] env[62627]: DEBUG oslo_concurrency.lockutils [req-fd448f03-fbcf-4743-b4ba-cca251160808 req-3bfdfc46-11e4-4181-ad51-5af12a0e47e0 service nova] Releasing lock "refresh_cache-1109aae0-62d2-4c55-8c68-f224f3de8b55" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1252.603316] env[62627]: DEBUG nova.compute.manager [req-1f0ebc68-4970-41f1-959b-21d89e1ee05a req-e86f5b51-82cd-46be-88e3-730ec61c8e60 service nova] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Received event network-vif-plugged-1034439a-d24f-4eb6-8139-67b012254146 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1252.604589] env[62627]: DEBUG oslo_concurrency.lockutils [req-1f0ebc68-4970-41f1-959b-21d89e1ee05a req-e86f5b51-82cd-46be-88e3-730ec61c8e60 service nova] Acquiring lock "1109aae0-62d2-4c55-8c68-f224f3de8b55-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1252.604589] env[62627]: DEBUG oslo_concurrency.lockutils [req-1f0ebc68-4970-41f1-959b-21d89e1ee05a req-e86f5b51-82cd-46be-88e3-730ec61c8e60 service nova] Lock "1109aae0-62d2-4c55-8c68-f224f3de8b55-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1252.604589] env[62627]: DEBUG oslo_concurrency.lockutils [req-1f0ebc68-4970-41f1-959b-21d89e1ee05a req-e86f5b51-82cd-46be-88e3-730ec61c8e60 service nova] Lock "1109aae0-62d2-4c55-8c68-f224f3de8b55-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1252.605025] env[62627]: DEBUG nova.compute.manager [req-1f0ebc68-4970-41f1-959b-21d89e1ee05a req-e86f5b51-82cd-46be-88e3-730ec61c8e60 service nova] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] No waiting events found dispatching network-vif-plugged-1034439a-d24f-4eb6-8139-67b012254146 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1252.606534] env[62627]: WARNING nova.compute.manager [req-1f0ebc68-4970-41f1-959b-21d89e1ee05a req-e86f5b51-82cd-46be-88e3-730ec61c8e60 service nova] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Received unexpected event network-vif-plugged-1034439a-d24f-4eb6-8139-67b012254146 for instance with vm_state active and task_state None. 
[ 1252.661135] env[62627]: DEBUG nova.network.neutron [None req-4bb9e2e3-f9b6-4535-aff7-0965e5b576c8 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Successfully updated port: 1034439a-d24f-4eb6-8139-67b012254146 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1252.662394] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b4d9d3d4-f83c-4b6e-a956-29a7b3b73f6d tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquiring lock "f195662c-9493-469a-ba36-d712f9944b28" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1252.662609] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b4d9d3d4-f83c-4b6e-a956-29a7b3b73f6d tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "f195662c-9493-469a-ba36-d712f9944b28" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1252.662800] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b4d9d3d4-f83c-4b6e-a956-29a7b3b73f6d tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquiring lock "f195662c-9493-469a-ba36-d712f9944b28-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1252.662974] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b4d9d3d4-f83c-4b6e-a956-29a7b3b73f6d tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "f195662c-9493-469a-ba36-d712f9944b28-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1252.663156] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b4d9d3d4-f83c-4b6e-a956-29a7b3b73f6d tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "f195662c-9493-469a-ba36-d712f9944b28-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1252.665119] env[62627]: DEBUG oslo_concurrency.lockutils [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Acquiring lock "5b001b38-d121-4e03-910e-54647536faf5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1252.665321] env[62627]: DEBUG oslo_concurrency.lockutils [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Lock "5b001b38-d121-4e03-910e-54647536faf5" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1252.666925] env[62627]: INFO nova.compute.manager [None req-b4d9d3d4-f83c-4b6e-a956-29a7b3b73f6d tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: f195662c-9493-469a-ba36-d712f9944b28] Terminating instance [ 1252.799121] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1252.799481] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Cleaning up deleted instances with incomplete migration {{(pid=62627) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11783}} [ 1252.825825] env[62627]: DEBUG oslo_vmware.api [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195293, 'name': ReconfigVM_Task, 'duration_secs': 0.312512} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1252.826183] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Reconfigured VM instance instance-00000063 to attach disk [datastore2] b68042fd-4955-41e4-b14f-1f8268ace377/b68042fd-4955-41e4-b14f-1f8268ace377.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1252.826917] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ff7c4fc2-3dd4-4121-9c31-38cddaf1bae1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.833324] env[62627]: DEBUG oslo_vmware.api [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 1252.833324] env[62627]: value = "task-2195294" [ 1252.833324] env[62627]: _type = "Task" [ 1252.833324] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1252.844272] env[62627]: DEBUG oslo_vmware.api [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195294, 'name': Rename_Task} progress is 5%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.930490] env[62627]: DEBUG oslo_concurrency.lockutils [None req-97ea74a8-b7d5-4b57-9ac3-87770aada023 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.778s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1252.957264] env[62627]: INFO nova.scheduler.client.report [None req-97ea74a8-b7d5-4b57-9ac3-87770aada023 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Deleted allocations for instance 30db4963-095b-4297-ad7c-0606451bd2b6 [ 1253.167726] env[62627]: DEBUG nova.compute.manager [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] [instance: 5b001b38-d121-4e03-910e-54647536faf5] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1253.171048] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4bb9e2e3-f9b6-4535-aff7-0965e5b576c8 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquiring lock "refresh_cache-1109aae0-62d2-4c55-8c68-f224f3de8b55" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1253.171230] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4bb9e2e3-f9b6-4535-aff7-0965e5b576c8 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquired lock "refresh_cache-1109aae0-62d2-4c55-8c68-f224f3de8b55" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1253.171405] env[62627]: DEBUG nova.network.neutron [None req-4bb9e2e3-f9b6-4535-aff7-0965e5b576c8 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1253.172675] env[62627]: DEBUG nova.compute.manager [None req-b4d9d3d4-f83c-4b6e-a956-29a7b3b73f6d tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: f195662c-9493-469a-ba36-d712f9944b28] Start destroying the instance on the hypervisor. 
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1253.173292] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-b4d9d3d4-f83c-4b6e-a956-29a7b3b73f6d tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: f195662c-9493-469a-ba36-d712f9944b28] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1253.174057] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a2b1dcd-7327-4075-91c9-fbd956736cd8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.183619] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4d9d3d4-f83c-4b6e-a956-29a7b3b73f6d tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: f195662c-9493-469a-ba36-d712f9944b28] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1253.183619] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-20edefa2-1127-4fbc-b5e2-3bb902af26b2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.189319] env[62627]: DEBUG oslo_vmware.api [None req-b4d9d3d4-f83c-4b6e-a956-29a7b3b73f6d tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1253.189319] env[62627]: value = "task-2195295" [ 1253.189319] env[62627]: _type = "Task" [ 1253.189319] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.198987] env[62627]: DEBUG oslo_vmware.api [None req-b4d9d3d4-f83c-4b6e-a956-29a7b3b73f6d tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195295, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.344065] env[62627]: DEBUG oslo_vmware.api [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195294, 'name': Rename_Task, 'duration_secs': 0.159921} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1253.344366] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1253.344615] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-93e0503f-270c-4576-afc9-7b396ecfeca1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.351148] env[62627]: DEBUG oslo_vmware.api [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 1253.351148] env[62627]: value = "task-2195296" [ 1253.351148] env[62627]: _type = "Task" [ 1253.351148] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.360190] env[62627]: DEBUG oslo_vmware.api [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195296, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.466542] env[62627]: DEBUG oslo_concurrency.lockutils [None req-97ea74a8-b7d5-4b57-9ac3-87770aada023 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lock "30db4963-095b-4297-ad7c-0606451bd2b6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.592s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1253.695703] env[62627]: DEBUG oslo_concurrency.lockutils [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1253.696877] env[62627]: DEBUG oslo_concurrency.lockutils [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1253.698026] env[62627]: INFO nova.compute.claims [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] [instance: 5b001b38-d121-4e03-910e-54647536faf5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1253.703629] env[62627]: DEBUG oslo_vmware.api [None req-b4d9d3d4-f83c-4b6e-a956-29a7b3b73f6d tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195295, 'name': PowerOffVM_Task, 'duration_secs': 0.234783} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1253.703803] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4d9d3d4-f83c-4b6e-a956-29a7b3b73f6d tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: f195662c-9493-469a-ba36-d712f9944b28] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1253.703984] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-b4d9d3d4-f83c-4b6e-a956-29a7b3b73f6d tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: f195662c-9493-469a-ba36-d712f9944b28] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1253.704244] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9253b480-cbb2-4e99-933c-fbf12db74e30 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.718697] env[62627]: WARNING nova.network.neutron [None req-4bb9e2e3-f9b6-4535-aff7-0965e5b576c8 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] a1c5c227-8f1f-4881-a6d8-f446b64d9846 already exists in list: networks containing: ['a1c5c227-8f1f-4881-a6d8-f446b64d9846']. ignoring it [ 1253.763143] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-b4d9d3d4-f83c-4b6e-a956-29a7b3b73f6d tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: f195662c-9493-469a-ba36-d712f9944b28] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1253.763451] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-b4d9d3d4-f83c-4b6e-a956-29a7b3b73f6d tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: f195662c-9493-469a-ba36-d712f9944b28] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1253.763701] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4d9d3d4-f83c-4b6e-a956-29a7b3b73f6d tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Deleting the datastore file [datastore2] f195662c-9493-469a-ba36-d712f9944b28 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1253.764429] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-02807f0a-9ee0-4cb1-ad9b-a04de8f11a71 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.774887] env[62627]: DEBUG oslo_vmware.api [None req-b4d9d3d4-f83c-4b6e-a956-29a7b3b73f6d tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for the task: (returnval){ [ 1253.774887] env[62627]: value = "task-2195298" [ 1253.774887] env[62627]: _type = "Task" [ 1253.774887] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.782752] env[62627]: DEBUG oslo_vmware.api [None req-b4d9d3d4-f83c-4b6e-a956-29a7b3b73f6d tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195298, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.799445] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1253.799738] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Cleaning up deleted instances {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11745}} [ 1253.865360] env[62627]: DEBUG oslo_vmware.api [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195296, 'name': PowerOnVM_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.063399] env[62627]: DEBUG nova.network.neutron [None req-4bb9e2e3-f9b6-4535-aff7-0965e5b576c8 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Updating instance_info_cache with network_info: [{"id": "3166f86b-c852-4335-9c18-b4c68f51d8ab", "address": "fa:16:3e:7b:bd:70", "network": {"id": "a1c5c227-8f1f-4881-a6d8-f446b64d9846", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1052922661-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.196", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e4f7ad3ebcf4c4d97bdbce58eec5a09", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "023d6500-887e-4dc4-bec5-06b40450d9c0", "external-id": "nsx-vlan-transportzone-108", "segmentation_id": 108, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3166f86b-c8", "ovs_interfaceid": "3166f86b-c852-4335-9c18-b4c68f51d8ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "1034439a-d24f-4eb6-8139-67b012254146", "address": "fa:16:3e:74:24:f6", "network": {"id": "a1c5c227-8f1f-4881-a6d8-f446b64d9846", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1052922661-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e4f7ad3ebcf4c4d97bdbce58eec5a09", 
"mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "023d6500-887e-4dc4-bec5-06b40450d9c0", "external-id": "nsx-vlan-transportzone-108", "segmentation_id": 108, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1034439a-d2", "ovs_interfaceid": "1034439a-d24f-4eb6-8139-67b012254146", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1254.285622] env[62627]: DEBUG oslo_vmware.api [None req-b4d9d3d4-f83c-4b6e-a956-29a7b3b73f6d tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Task: {'id': task-2195298, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.295644} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1254.285877] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4d9d3d4-f83c-4b6e-a956-29a7b3b73f6d tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1254.286076] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-b4d9d3d4-f83c-4b6e-a956-29a7b3b73f6d tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: f195662c-9493-469a-ba36-d712f9944b28] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1254.286258] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-b4d9d3d4-f83c-4b6e-a956-29a7b3b73f6d tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: f195662c-9493-469a-ba36-d712f9944b28] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1254.286430] env[62627]: INFO nova.compute.manager [None req-b4d9d3d4-f83c-4b6e-a956-29a7b3b73f6d tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] [instance: f195662c-9493-469a-ba36-d712f9944b28] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1254.286728] env[62627]: DEBUG oslo.service.loopingcall [None req-b4d9d3d4-f83c-4b6e-a956-29a7b3b73f6d tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1254.286986] env[62627]: DEBUG nova.compute.manager [-] [instance: f195662c-9493-469a-ba36-d712f9944b28] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1254.287108] env[62627]: DEBUG nova.network.neutron [-] [instance: f195662c-9493-469a-ba36-d712f9944b28] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1254.314732] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] There are 49 instances to clean {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11754}} [ 1254.314912] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: f2eefd04-2e4d-48b2-9895-ae58070db970] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1254.341202] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquiring lock "7e340c99-8d15-4696-98d5-a952f5d2aeee" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1254.341430] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "7e340c99-8d15-4696-98d5-a952f5d2aeee" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1254.361622] env[62627]: DEBUG oslo_vmware.api [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195296, 'name': PowerOnVM_Task, 'duration_secs': 0.56305} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1254.361871] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1254.362083] env[62627]: DEBUG nova.compute.manager [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1254.362824] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2b41eab-36c3-4bbe-8221-ebfeef3d33ee {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.566163] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4bb9e2e3-f9b6-4535-aff7-0965e5b576c8 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Releasing lock "refresh_cache-1109aae0-62d2-4c55-8c68-f224f3de8b55" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1254.566769] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4bb9e2e3-f9b6-4535-aff7-0965e5b576c8 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquiring lock "1109aae0-62d2-4c55-8c68-f224f3de8b55" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1254.566930] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4bb9e2e3-f9b6-4535-aff7-0965e5b576c8 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquired lock "1109aae0-62d2-4c55-8c68-f224f3de8b55" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1254.567757] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98976cf0-5820-4481-b3e1-24350444d10e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.586956] env[62627]: DEBUG nova.virt.hardware [None req-4bb9e2e3-f9b6-4535-aff7-0965e5b576c8 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1254.587208] env[62627]: DEBUG nova.virt.hardware [None req-4bb9e2e3-f9b6-4535-aff7-0965e5b576c8 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Flavor limits 0:0:0 
{{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1254.587365] env[62627]: DEBUG nova.virt.hardware [None req-4bb9e2e3-f9b6-4535-aff7-0965e5b576c8 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1254.587544] env[62627]: DEBUG nova.virt.hardware [None req-4bb9e2e3-f9b6-4535-aff7-0965e5b576c8 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1254.587688] env[62627]: DEBUG nova.virt.hardware [None req-4bb9e2e3-f9b6-4535-aff7-0965e5b576c8 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1254.587831] env[62627]: DEBUG nova.virt.hardware [None req-4bb9e2e3-f9b6-4535-aff7-0965e5b576c8 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1254.588044] env[62627]: DEBUG nova.virt.hardware [None req-4bb9e2e3-f9b6-4535-aff7-0965e5b576c8 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1254.588225] env[62627]: DEBUG nova.virt.hardware [None req-4bb9e2e3-f9b6-4535-aff7-0965e5b576c8 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1254.588399] env[62627]: DEBUG nova.virt.hardware [None req-4bb9e2e3-f9b6-4535-aff7-0965e5b576c8 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1254.588565] env[62627]: DEBUG nova.virt.hardware [None req-4bb9e2e3-f9b6-4535-aff7-0965e5b576c8 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1254.588767] env[62627]: DEBUG nova.virt.hardware [None req-4bb9e2e3-f9b6-4535-aff7-0965e5b576c8 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1254.594893] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-4bb9e2e3-f9b6-4535-aff7-0965e5b576c8 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Reconfiguring VM to attach interface {{(pid=62627) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1254.595467] 
env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1fdc261d-0dbc-4f88-b16f-6bec4588be97 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.613505] env[62627]: DEBUG oslo_vmware.api [None req-4bb9e2e3-f9b6-4535-aff7-0965e5b576c8 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Waiting for the task: (returnval){ [ 1254.613505] env[62627]: value = "task-2195299" [ 1254.613505] env[62627]: _type = "Task" [ 1254.613505] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1254.621742] env[62627]: DEBUG oslo_vmware.api [None req-4bb9e2e3-f9b6-4535-aff7-0965e5b576c8 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195299, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.630982] env[62627]: DEBUG nova.compute.manager [req-a61a05c6-b25e-4e50-b16b-965b83437d54 req-fdbd03b5-ac73-4228-8bc8-931033f5dabc service nova] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Received event network-changed-1034439a-d24f-4eb6-8139-67b012254146 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1254.631182] env[62627]: DEBUG nova.compute.manager [req-a61a05c6-b25e-4e50-b16b-965b83437d54 req-fdbd03b5-ac73-4228-8bc8-931033f5dabc service nova] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Refreshing instance network info cache due to event network-changed-1034439a-d24f-4eb6-8139-67b012254146. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1254.631387] env[62627]: DEBUG oslo_concurrency.lockutils [req-a61a05c6-b25e-4e50-b16b-965b83437d54 req-fdbd03b5-ac73-4228-8bc8-931033f5dabc service nova] Acquiring lock "refresh_cache-1109aae0-62d2-4c55-8c68-f224f3de8b55" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1254.631529] env[62627]: DEBUG oslo_concurrency.lockutils [req-a61a05c6-b25e-4e50-b16b-965b83437d54 req-fdbd03b5-ac73-4228-8bc8-931033f5dabc service nova] Acquired lock "refresh_cache-1109aae0-62d2-4c55-8c68-f224f3de8b55" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1254.631706] env[62627]: DEBUG nova.network.neutron [req-a61a05c6-b25e-4e50-b16b-965b83437d54 req-fdbd03b5-ac73-4228-8bc8-931033f5dabc service nova] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Refreshing network info cache for port 1034439a-d24f-4eb6-8139-67b012254146 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1254.661915] env[62627]: DEBUG oslo_concurrency.lockutils [None req-35dfced3-8000-434c-8cd3-9bea0669938c tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Acquiring lock "3001147e-7be3-4f72-b330-8f99bf7cf3ea" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1254.662223] env[62627]: DEBUG oslo_concurrency.lockutils [None req-35dfced3-8000-434c-8cd3-9bea0669938c tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lock "3001147e-7be3-4f72-b330-8f99bf7cf3ea" 
acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1254.817997] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 4a4b57c8-251c-4ca8-b662-b96214110979] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1254.842520] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74e62fc7-5f9f-4e20-91c2-2d49b3e62eca {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.845861] env[62627]: DEBUG nova.compute.manager [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 7e340c99-8d15-4696-98d5-a952f5d2aeee] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1254.853381] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c8589cd-2211-47a7-95c2-1ba548ec34a4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.887600] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f047187f-bf44-4e6c-8e5e-fb471f9272b0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.892163] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1254.897453] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b249c120-d6b7-4819-a6a0-b743b301f8e8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.911019] env[62627]: DEBUG nova.compute.provider_tree [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1255.057058] env[62627]: DEBUG nova.network.neutron [-] [instance: f195662c-9493-469a-ba36-d712f9944b28] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1255.124140] env[62627]: DEBUG oslo_vmware.api [None req-4bb9e2e3-f9b6-4535-aff7-0965e5b576c8 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195299, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.165109] env[62627]: INFO nova.compute.manager [None req-35dfced3-8000-434c-8cd3-9bea0669938c tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] Detaching volume 98c96310-44b1-47ab-9c71-3099818174f5 [ 1255.208808] env[62627]: INFO nova.virt.block_device [None req-35dfced3-8000-434c-8cd3-9bea0669938c tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] Attempting to driver detach volume 98c96310-44b1-47ab-9c71-3099818174f5 from mountpoint /dev/sdb [ 1255.209013] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-35dfced3-8000-434c-8cd3-9bea0669938c tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] Volume detach. Driver type: vmdk {{(pid=62627) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1255.209694] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-35dfced3-8000-434c-8cd3-9bea0669938c tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447800', 'volume_id': '98c96310-44b1-47ab-9c71-3099818174f5', 'name': 'volume-98c96310-44b1-47ab-9c71-3099818174f5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '3001147e-7be3-4f72-b330-8f99bf7cf3ea', 'attached_at': '', 'detached_at': '', 'volume_id': '98c96310-44b1-47ab-9c71-3099818174f5', 'serial': '98c96310-44b1-47ab-9c71-3099818174f5'} {{(pid=62627) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1255.210444] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ff88270-6bf8-445a-86c2-eac00d359cd0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.234902] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6f7c913-c2e4-4b89-9f9c-d5178b85aaf9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.242585] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae9aae73-438c-4e83-bd4d-15e8724e1c84 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.265761] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fbf7e14-63bb-444c-86ab-4816c52e5229 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.281312] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-35dfced3-8000-434c-8cd3-9bea0669938c tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] The volume has not been displaced from its original location: [datastore2] volume-98c96310-44b1-47ab-9c71-3099818174f5/volume-98c96310-44b1-47ab-9c71-3099818174f5.vmdk. No consolidation needed. 
{{(pid=62627) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1255.286421] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-35dfced3-8000-434c-8cd3-9bea0669938c tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] Reconfiguring VM instance instance-00000059 to detach disk 2001 {{(pid=62627) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1255.286739] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7b27e16d-aeaf-4889-847d-de06a753e586 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.307687] env[62627]: DEBUG oslo_vmware.api [None req-35dfced3-8000-434c-8cd3-9bea0669938c tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Waiting for the task: (returnval){ [ 1255.307687] env[62627]: value = "task-2195300" [ 1255.307687] env[62627]: _type = "Task" [ 1255.307687] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1255.315521] env[62627]: DEBUG oslo_vmware.api [None req-35dfced3-8000-434c-8cd3-9bea0669938c tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195300, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.323168] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 30db4963-095b-4297-ad7c-0606451bd2b6] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1255.367225] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1255.400176] env[62627]: DEBUG nova.network.neutron [req-a61a05c6-b25e-4e50-b16b-965b83437d54 req-fdbd03b5-ac73-4228-8bc8-931033f5dabc service nova] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Updated VIF entry in instance network info cache for port 1034439a-d24f-4eb6-8139-67b012254146. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1255.400645] env[62627]: DEBUG nova.network.neutron [req-a61a05c6-b25e-4e50-b16b-965b83437d54 req-fdbd03b5-ac73-4228-8bc8-931033f5dabc service nova] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Updating instance_info_cache with network_info: [{"id": "3166f86b-c852-4335-9c18-b4c68f51d8ab", "address": "fa:16:3e:7b:bd:70", "network": {"id": "a1c5c227-8f1f-4881-a6d8-f446b64d9846", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1052922661-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.196", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e4f7ad3ebcf4c4d97bdbce58eec5a09", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "023d6500-887e-4dc4-bec5-06b40450d9c0", "external-id": "nsx-vlan-transportzone-108", "segmentation_id": 108, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3166f86b-c8", "ovs_interfaceid": "3166f86b-c852-4335-9c18-b4c68f51d8ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "1034439a-d24f-4eb6-8139-67b012254146", "address": "fa:16:3e:74:24:f6", "network": {"id": "a1c5c227-8f1f-4881-a6d8-f446b64d9846", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1052922661-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e4f7ad3ebcf4c4d97bdbce58eec5a09", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "023d6500-887e-4dc4-bec5-06b40450d9c0", "external-id": "nsx-vlan-transportzone-108", "segmentation_id": 108, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1034439a-d2", "ovs_interfaceid": "1034439a-d24f-4eb6-8139-67b012254146", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1255.413900] env[62627]: DEBUG nova.scheduler.client.report [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1255.562887] env[62627]: INFO nova.compute.manager [-] [instance: f195662c-9493-469a-ba36-d712f9944b28] Took 1.28 seconds to deallocate network for instance. [ 1255.625720] env[62627]: DEBUG oslo_vmware.api [None req-4bb9e2e3-f9b6-4535-aff7-0965e5b576c8 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195299, 'name': ReconfigVM_Task, 'duration_secs': 0.594132} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1255.626583] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4bb9e2e3-f9b6-4535-aff7-0965e5b576c8 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Releasing lock "1109aae0-62d2-4c55-8c68-f224f3de8b55" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1255.626784] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-4bb9e2e3-f9b6-4535-aff7-0965e5b576c8 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Reconfigured VM to attach interface {{(pid=62627) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1255.817243] env[62627]: DEBUG oslo_vmware.api [None req-35dfced3-8000-434c-8cd3-9bea0669938c tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195300, 'name': ReconfigVM_Task, 'duration_secs': 0.456276} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1255.817526] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-35dfced3-8000-434c-8cd3-9bea0669938c tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] Reconfigured VM instance instance-00000059 to detach disk 2001 {{(pid=62627) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1255.822525] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e8284732-966e-4381-8d46-8d61fe0ef457 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.832323] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 722d7bce-f0d7-4891-aa48-0ead10786a6b] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1255.839297] env[62627]: DEBUG oslo_vmware.api [None req-35dfced3-8000-434c-8cd3-9bea0669938c tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Waiting for the task: (returnval){ [ 1255.839297] env[62627]: value = "task-2195301" [ 1255.839297] env[62627]: _type = "Task" [ 1255.839297] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1255.846983] env[62627]: DEBUG oslo_vmware.api [None req-35dfced3-8000-434c-8cd3-9bea0669938c tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195301, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.903649] env[62627]: DEBUG oslo_concurrency.lockutils [req-a61a05c6-b25e-4e50-b16b-965b83437d54 req-fdbd03b5-ac73-4228-8bc8-931033f5dabc service nova] Releasing lock "refresh_cache-1109aae0-62d2-4c55-8c68-f224f3de8b55" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1255.903919] env[62627]: DEBUG nova.compute.manager [req-a61a05c6-b25e-4e50-b16b-965b83437d54 req-fdbd03b5-ac73-4228-8bc8-931033f5dabc service nova] [instance: f195662c-9493-469a-ba36-d712f9944b28] Received event network-vif-deleted-20bdf50e-6e1b-46c8-ae6a-26874ecb62a9 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1255.904112] env[62627]: INFO nova.compute.manager [req-a61a05c6-b25e-4e50-b16b-965b83437d54 req-fdbd03b5-ac73-4228-8bc8-931033f5dabc service nova] [instance: f195662c-9493-469a-ba36-d712f9944b28] Neutron deleted interface 20bdf50e-6e1b-46c8-ae6a-26874ecb62a9; detaching it from the instance and deleting it from the info cache [ 1255.904288] env[62627]: DEBUG nova.network.neutron [req-a61a05c6-b25e-4e50-b16b-965b83437d54 req-fdbd03b5-ac73-4228-8bc8-931033f5dabc service nova] [instance: f195662c-9493-469a-ba36-d712f9944b28] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1255.919291] env[62627]: DEBUG oslo_concurrency.lockutils [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.223s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1255.919853] env[62627]: DEBUG nova.compute.manager [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] [instance: 5b001b38-d121-4e03-910e-54647536faf5] Start building networks asynchronously for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1255.922379] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 1.030s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1255.922962] env[62627]: DEBUG nova.objects.instance [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62627) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1256.070110] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b4d9d3d4-f83c-4b6e-a956-29a7b3b73f6d tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1256.131452] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4bb9e2e3-f9b6-4535-aff7-0965e5b576c8 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lock "interface-1109aae0-62d2-4c55-8c68-f224f3de8b55-1034439a-d24f-4eb6-8139-67b012254146" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.420s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1256.335520] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 36a8d4d3-d6b6-4b42-9514-0e29ee45aea9] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1256.348825] env[62627]: DEBUG oslo_vmware.api [None req-35dfced3-8000-434c-8cd3-9bea0669938c tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195301, 'name': ReconfigVM_Task, 'duration_secs': 0.149329} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1256.349144] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-35dfced3-8000-434c-8cd3-9bea0669938c tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447800', 'volume_id': '98c96310-44b1-47ab-9c71-3099818174f5', 'name': 'volume-98c96310-44b1-47ab-9c71-3099818174f5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '3001147e-7be3-4f72-b330-8f99bf7cf3ea', 'attached_at': '', 'detached_at': '', 'volume_id': '98c96310-44b1-47ab-9c71-3099818174f5', 'serial': '98c96310-44b1-47ab-9c71-3099818174f5'} {{(pid=62627) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1256.407052] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ec563d88-eaec-4187-a9d3-4033a3da49c6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.416285] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0ba9b36-12b0-4255-8130-f4cf38ae0d52 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.431122] env[62627]: DEBUG nova.compute.utils [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1256.432488] env[62627]: DEBUG nova.compute.manager [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] [instance: 5b001b38-d121-4e03-910e-54647536faf5] Allocating IP information in the background. {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1256.432710] env[62627]: DEBUG nova.network.neutron [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] [instance: 5b001b38-d121-4e03-910e-54647536faf5] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1256.453556] env[62627]: DEBUG nova.compute.manager [req-a61a05c6-b25e-4e50-b16b-965b83437d54 req-fdbd03b5-ac73-4228-8bc8-931033f5dabc service nova] [instance: f195662c-9493-469a-ba36-d712f9944b28] Detach interface failed, port_id=20bdf50e-6e1b-46c8-ae6a-26874ecb62a9, reason: Instance f195662c-9493-469a-ba36-d712f9944b28 could not be found. 
{{(pid=62627) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1256.475832] env[62627]: DEBUG nova.policy [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ae191002866a42c387e5d38dff6a98aa', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8baa28640bef4e45aebec251dbb31b1a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 1256.839383] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: c25fe185-2d6a-40a8-be06-84ee22a5464e] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1256.890869] env[62627]: DEBUG nova.objects.instance [None req-35dfced3-8000-434c-8cd3-9bea0669938c tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lazy-loading 'flavor' on Instance uuid 3001147e-7be3-4f72-b330-8f99bf7cf3ea {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1256.934174] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8bcc135f-075c-4afa-80ad-2b95733f4895 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.012s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1256.936217] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.569s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1256.937663] env[62627]: INFO nova.compute.claims [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 7e340c99-8d15-4696-98d5-a952f5d2aeee] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1256.940498] env[62627]: DEBUG nova.compute.manager [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] [instance: 5b001b38-d121-4e03-910e-54647536faf5] Start building block device mappings for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1257.026861] env[62627]: DEBUG nova.network.neutron [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] [instance: 5b001b38-d121-4e03-910e-54647536faf5] Successfully created port: 8875fdf7-7fa0-4e9d-bfe4-be006d9c72f8 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1257.342986] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 367a83b9-79f8-43b2-91dd-40d13193fa40] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1257.624476] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b34f588c-71fb-4709-8117-056c3342d80e tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquiring lock "interface-1109aae0-62d2-4c55-8c68-f224f3de8b55-1034439a-d24f-4eb6-8139-67b012254146" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1257.624762] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b34f588c-71fb-4709-8117-056c3342d80e tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lock "interface-1109aae0-62d2-4c55-8c68-f224f3de8b55-1034439a-d24f-4eb6-8139-67b012254146" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1257.847265] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 28e42d69-0c19-4e3c-ae49-9076fdc65aa8] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1257.897533] env[62627]: DEBUG oslo_concurrency.lockutils [None req-35dfced3-8000-434c-8cd3-9bea0669938c tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lock "3001147e-7be3-4f72-b330-8f99bf7cf3ea" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.235s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1257.948261] env[62627]: DEBUG nova.compute.manager [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] [instance: 5b001b38-d121-4e03-910e-54647536faf5] Start spawning the instance on the hypervisor. 
{{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1257.973242] env[62627]: DEBUG nova.virt.hardware [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1257.973485] env[62627]: DEBUG nova.virt.hardware [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1257.973646] env[62627]: DEBUG nova.virt.hardware [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1257.973828] env[62627]: DEBUG nova.virt.hardware [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1257.973976] env[62627]: DEBUG nova.virt.hardware [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1257.974139] env[62627]: DEBUG nova.virt.hardware [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1257.974345] env[62627]: DEBUG nova.virt.hardware [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1257.974505] env[62627]: DEBUG nova.virt.hardware [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Build topologies for 1 
vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1257.974668] env[62627]: DEBUG nova.virt.hardware [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1257.974824] env[62627]: DEBUG nova.virt.hardware [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1257.975060] env[62627]: DEBUG nova.virt.hardware [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1257.976153] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fa23fc1-f057-4c55-a72a-23d89d7bfb87 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.986382] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa98e473-7cd5-4867-a202-0bd845a72e5a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.067554] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeb676b7-ad17-4cbf-b72b-8f5cfd1fe142 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.074901] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4ad9737-5d42-4db6-8cb8-1dbc1a50ea0f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.105815] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac33a921-1202-4f8f-a11a-e00248bc0e0a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.113362] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c548b75-051d-4875-bfa3-ac9e8a0a35f0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.126396] env[62627]: DEBUG nova.compute.provider_tree [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1258.128048] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b34f588c-71fb-4709-8117-056c3342d80e tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquiring lock "1109aae0-62d2-4c55-8c68-f224f3de8b55" {{(pid=62627) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1258.128048] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b34f588c-71fb-4709-8117-056c3342d80e tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquired lock "1109aae0-62d2-4c55-8c68-f224f3de8b55" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1258.128936] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c49c644-13f2-4226-b78c-af6726d542e4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.145569] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9540b164-b777-4f74-bdf4-18c75a11a6be {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.170951] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-b34f588c-71fb-4709-8117-056c3342d80e tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Reconfiguring VM to detach interface {{(pid=62627) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1258.171255] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-22bdedd9-b24d-48e3-ae07-c9729967aaa6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.190549] env[62627]: DEBUG oslo_vmware.api [None req-b34f588c-71fb-4709-8117-056c3342d80e tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Waiting for the task: (returnval){ [ 1258.190549] env[62627]: value = "task-2195302" [ 1258.190549] env[62627]: _type = "Task" [ 1258.190549] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1258.198325] env[62627]: DEBUG oslo_vmware.api [None req-b34f588c-71fb-4709-8117-056c3342d80e tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195302, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1258.350253] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 634d547b-3b5e-4185-8229-daf055ca6e69] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1258.440834] env[62627]: DEBUG nova.compute.manager [req-9ced0ae0-695b-4063-b1b9-4403f34b8bc9 req-b6a44eaa-e9c4-47d9-b764-3641c355840e service nova] [instance: 5b001b38-d121-4e03-910e-54647536faf5] Received event network-vif-plugged-8875fdf7-7fa0-4e9d-bfe4-be006d9c72f8 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1258.440834] env[62627]: DEBUG oslo_concurrency.lockutils [req-9ced0ae0-695b-4063-b1b9-4403f34b8bc9 req-b6a44eaa-e9c4-47d9-b764-3641c355840e service nova] Acquiring lock "5b001b38-d121-4e03-910e-54647536faf5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1258.440924] env[62627]: DEBUG oslo_concurrency.lockutils [req-9ced0ae0-695b-4063-b1b9-4403f34b8bc9 req-b6a44eaa-e9c4-47d9-b764-3641c355840e service nova] Lock "5b001b38-d121-4e03-910e-54647536faf5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1258.441071] env[62627]: DEBUG oslo_concurrency.lockutils [req-9ced0ae0-695b-4063-b1b9-4403f34b8bc9 req-b6a44eaa-e9c4-47d9-b764-3641c355840e service nova] Lock "5b001b38-d121-4e03-910e-54647536faf5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1258.441243] env[62627]: DEBUG nova.compute.manager [req-9ced0ae0-695b-4063-b1b9-4403f34b8bc9 req-b6a44eaa-e9c4-47d9-b764-3641c355840e service nova] [instance: 5b001b38-d121-4e03-910e-54647536faf5] No waiting events found dispatching network-vif-plugged-8875fdf7-7fa0-4e9d-bfe4-be006d9c72f8 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1258.441409] env[62627]: WARNING nova.compute.manager [req-9ced0ae0-695b-4063-b1b9-4403f34b8bc9 req-b6a44eaa-e9c4-47d9-b764-3641c355840e service nova] [instance: 5b001b38-d121-4e03-910e-54647536faf5] Received unexpected event network-vif-plugged-8875fdf7-7fa0-4e9d-bfe4-be006d9c72f8 for instance with vm_state building and task_state spawning. 
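The records in this trace follow a fixed layout: "[ <seconds>] env[<id>]: <LEVEL> <logger> [<request/context ids>] [instance: <uuid>] <message> {{(pid=<pid>) <function> <file>:<line>}}". As a minimal, hypothetical sketch (the regex and field names below are assumptions inferred only from the records shown here, not any official Nova or devstack format specification), the following Python pulls the timestamp, level, logger, context, instance UUID and message out of one such record:

import re

# Hypothetical pattern derived from the records in this trace; not an official Nova/devstack log spec.
RECORD = re.compile(
    r"\[\s*(?P<ts>\d+\.\d+)\]\s+env\[\d+\]:\s+"              # relative timestamp and env id
    r"(?P<level>DEBUG|INFO|WARNING|ERROR)\s+"                  # log level
    r"(?P<logger>\S+)\s+"                                      # logger name, e.g. nova.compute.manager
    r"(?:\[(?P<context>[^\]]*)\]\s+)?"                         # optional request/user/project context
    r"(?:\[instance:\s*(?P<instance>[0-9a-f-]{36})\]\s+)?"     # optional instance UUID
    r"(?P<message>.*)$"                                         # free-form message (may include the {{...}} suffix)
)

def parse_record(line: str):
    """Return a dict of fields for one log record, or None if the line does not match."""
    m = RECORD.search(line)
    return m.groupdict() if m else None

if __name__ == "__main__":
    # Example record copied from this trace.
    sample = ("[ 1258.441409] env[62627]: WARNING nova.compute.manager "
              "[req-9ced0ae0-695b-4063-b1b9-4403f34b8bc9 req-b6a44eaa-e9c4-47d9-b764-3641c355840e service nova] "
              "[instance: 5b001b38-d121-4e03-910e-54647536faf5] "
              "Received unexpected event network-vif-plugged-8875fdf7-7fa0-4e9d-bfe4-be006d9c72f8 "
              "for instance with vm_state building and task_state spawning.")
    print(parse_record(sample))

Grouping parsed records by the request ids in the context field (or by the instance UUID) is a convenient way to follow one operation, such as the detach/terminate sequence for instance 3001147e-7be3-4f72-b330-8f99bf7cf3ea, through the interleaved output below.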
[ 1258.572110] env[62627]: DEBUG nova.network.neutron [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] [instance: 5b001b38-d121-4e03-910e-54647536faf5] Successfully updated port: 8875fdf7-7fa0-4e9d-bfe4-be006d9c72f8 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1258.631042] env[62627]: DEBUG nova.scheduler.client.report [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1258.701251] env[62627]: DEBUG oslo_vmware.api [None req-b34f588c-71fb-4709-8117-056c3342d80e tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195302, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1258.858127] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: abb14e3e-b521-4f21-bc94-6bbb66fbee57] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1258.981238] env[62627]: DEBUG oslo_concurrency.lockutils [None req-efc5541c-c6c9-4037-9dd6-bf4918ed5b44 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Acquiring lock "3001147e-7be3-4f72-b330-8f99bf7cf3ea" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1258.981539] env[62627]: DEBUG oslo_concurrency.lockutils [None req-efc5541c-c6c9-4037-9dd6-bf4918ed5b44 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lock "3001147e-7be3-4f72-b330-8f99bf7cf3ea" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1258.981859] env[62627]: DEBUG oslo_concurrency.lockutils [None req-efc5541c-c6c9-4037-9dd6-bf4918ed5b44 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Acquiring lock "3001147e-7be3-4f72-b330-8f99bf7cf3ea-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1258.982128] env[62627]: DEBUG oslo_concurrency.lockutils [None req-efc5541c-c6c9-4037-9dd6-bf4918ed5b44 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lock "3001147e-7be3-4f72-b330-8f99bf7cf3ea-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: 
waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1258.982359] env[62627]: DEBUG oslo_concurrency.lockutils [None req-efc5541c-c6c9-4037-9dd6-bf4918ed5b44 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lock "3001147e-7be3-4f72-b330-8f99bf7cf3ea-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1258.984801] env[62627]: INFO nova.compute.manager [None req-efc5541c-c6c9-4037-9dd6-bf4918ed5b44 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] Terminating instance [ 1259.074610] env[62627]: DEBUG oslo_concurrency.lockutils [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Acquiring lock "refresh_cache-5b001b38-d121-4e03-910e-54647536faf5" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1259.074772] env[62627]: DEBUG oslo_concurrency.lockutils [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Acquired lock "refresh_cache-5b001b38-d121-4e03-910e-54647536faf5" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1259.074907] env[62627]: DEBUG nova.network.neutron [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] [instance: 5b001b38-d121-4e03-910e-54647536faf5] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1259.134950] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.199s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1259.135467] env[62627]: DEBUG nova.compute.manager [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 7e340c99-8d15-4696-98d5-a952f5d2aeee] Start building networks asynchronously for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1259.137934] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b4d9d3d4-f83c-4b6e-a956-29a7b3b73f6d tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.068s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1259.138216] env[62627]: DEBUG nova.objects.instance [None req-b4d9d3d4-f83c-4b6e-a956-29a7b3b73f6d tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lazy-loading 'resources' on Instance uuid f195662c-9493-469a-ba36-d712f9944b28 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1259.202507] env[62627]: DEBUG oslo_vmware.api [None req-b34f588c-71fb-4709-8117-056c3342d80e tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195302, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1259.361430] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: b282093d-8013-436d-94d2-bc26161456ea] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1259.488552] env[62627]: DEBUG nova.compute.manager [None req-efc5541c-c6c9-4037-9dd6-bf4918ed5b44 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] Start destroying the instance on the hypervisor. {{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1259.488814] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-efc5541c-c6c9-4037-9dd6-bf4918ed5b44 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1259.489726] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bc82dbe-4562-46ad-bdb9-7e6e071bcd98 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.499338] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-efc5541c-c6c9-4037-9dd6-bf4918ed5b44 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1259.499603] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-69610081-1357-45b3-bf2d-8a9b7b1116f4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.505835] env[62627]: DEBUG oslo_vmware.api [None req-efc5541c-c6c9-4037-9dd6-bf4918ed5b44 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Waiting for the task: (returnval){ [ 1259.505835] env[62627]: value = "task-2195303" [ 1259.505835] env[62627]: _type = "Task" [ 1259.505835] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1259.514026] env[62627]: DEBUG oslo_vmware.api [None req-efc5541c-c6c9-4037-9dd6-bf4918ed5b44 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195303, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1259.610248] env[62627]: DEBUG nova.network.neutron [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] [instance: 5b001b38-d121-4e03-910e-54647536faf5] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1259.641561] env[62627]: DEBUG nova.compute.utils [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1259.645569] env[62627]: DEBUG nova.compute.manager [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 7e340c99-8d15-4696-98d5-a952f5d2aeee] Allocating IP information in the background. {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1259.645741] env[62627]: DEBUG nova.network.neutron [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 7e340c99-8d15-4696-98d5-a952f5d2aeee] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1259.709921] env[62627]: DEBUG oslo_vmware.api [None req-b34f588c-71fb-4709-8117-056c3342d80e tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195302, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1259.713279] env[62627]: DEBUG nova.policy [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ade098da576147a6bff6685cb886ff78', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7a97f57bdf4a46db80460b1d5c9f9b9f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 1259.796682] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d21d894-dd33-4379-93cc-d2dfc9eeea45 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.811438] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-430ec263-66ca-4701-b337-e698a463e013 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.845279] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8d7276c-3563-459d-909c-711689f46651 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.858550] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa18284d-00d5-462e-80aa-3b0fbf30bc55 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.864242] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: b7fd9a2a-1960-4dba-93af-2dd5fe30a90d] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1259.874861] env[62627]: DEBUG nova.compute.provider_tree [None req-b4d9d3d4-f83c-4b6e-a956-29a7b3b73f6d tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1259.877506] env[62627]: DEBUG nova.network.neutron [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] [instance: 5b001b38-d121-4e03-910e-54647536faf5] Updating instance_info_cache with network_info: [{"id": "8875fdf7-7fa0-4e9d-bfe4-be006d9c72f8", "address": "fa:16:3e:c6:27:14", "network": {"id": "c638550c-4fb3-4d42-8d7e-e2722fddeece", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-309277593-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"8baa28640bef4e45aebec251dbb31b1a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bed837fa-6b6a-4192-a229-a99426a46065", "external-id": "nsx-vlan-transportzone-954", "segmentation_id": 954, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8875fdf7-7f", "ovs_interfaceid": "8875fdf7-7fa0-4e9d-bfe4-be006d9c72f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1260.015935] env[62627]: DEBUG oslo_vmware.api [None req-efc5541c-c6c9-4037-9dd6-bf4918ed5b44 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195303, 'name': PowerOffVM_Task, 'duration_secs': 0.181609} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1260.016236] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-efc5541c-c6c9-4037-9dd6-bf4918ed5b44 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1260.016417] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-efc5541c-c6c9-4037-9dd6-bf4918ed5b44 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1260.016679] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a72ff2e9-8882-4a07-aafb-72038e13eb22 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.086262] env[62627]: DEBUG nova.network.neutron [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 7e340c99-8d15-4696-98d5-a952f5d2aeee] Successfully created port: 2cabec54-95a1-48db-b0ba-596ee7f4671e {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1260.106474] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-efc5541c-c6c9-4037-9dd6-bf4918ed5b44 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1260.107777] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-efc5541c-c6c9-4037-9dd6-bf4918ed5b44 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] Deleting contents of the VM from datastore datastore1 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1260.108011] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-efc5541c-c6c9-4037-9dd6-bf4918ed5b44 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Deleting the datastore file [datastore1] 3001147e-7be3-4f72-b330-8f99bf7cf3ea {{(pid=62627) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1260.108337] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cafd2468-b531-4428-97c8-42f11ebbb158 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.114840] env[62627]: DEBUG oslo_vmware.api [None req-efc5541c-c6c9-4037-9dd6-bf4918ed5b44 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Waiting for the task: (returnval){ [ 1260.114840] env[62627]: value = "task-2195305" [ 1260.114840] env[62627]: _type = "Task" [ 1260.114840] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1260.123457] env[62627]: DEBUG oslo_vmware.api [None req-efc5541c-c6c9-4037-9dd6-bf4918ed5b44 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195305, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1260.149667] env[62627]: DEBUG nova.compute.manager [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 7e340c99-8d15-4696-98d5-a952f5d2aeee] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1260.207893] env[62627]: DEBUG oslo_vmware.api [None req-b34f588c-71fb-4709-8117-056c3342d80e tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195302, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1260.376017] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: f1d87fd3-1d7b-4d51-8d6c-8b03d42003b1] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1260.378553] env[62627]: DEBUG nova.scheduler.client.report [None req-b4d9d3d4-f83c-4b6e-a956-29a7b3b73f6d tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1260.381804] env[62627]: DEBUG oslo_concurrency.lockutils [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Releasing lock "refresh_cache-5b001b38-d121-4e03-910e-54647536faf5" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1260.382077] env[62627]: DEBUG nova.compute.manager [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] [instance: 5b001b38-d121-4e03-910e-54647536faf5] Instance network_info: |[{"id": "8875fdf7-7fa0-4e9d-bfe4-be006d9c72f8", "address": "fa:16:3e:c6:27:14", "network": {"id": "c638550c-4fb3-4d42-8d7e-e2722fddeece", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-309277593-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8baa28640bef4e45aebec251dbb31b1a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bed837fa-6b6a-4192-a229-a99426a46065", "external-id": "nsx-vlan-transportzone-954", "segmentation_id": 954, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8875fdf7-7f", "ovs_interfaceid": "8875fdf7-7fa0-4e9d-bfe4-be006d9c72f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1260.382631] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] [instance: 5b001b38-d121-4e03-910e-54647536faf5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c6:27:14', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'bed837fa-6b6a-4192-a229-a99426a46065', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8875fdf7-7fa0-4e9d-bfe4-be006d9c72f8', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1260.390067] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Creating folder: Project (8baa28640bef4e45aebec251dbb31b1a). Parent ref: group-v447541. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1260.390339] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0d623a77-9eeb-4ea4-938c-4dc4c5985bee {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.402627] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Created folder: Project (8baa28640bef4e45aebec251dbb31b1a) in parent group-v447541. [ 1260.402824] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Creating folder: Instances. Parent ref: group-v447824. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1260.403065] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ef1beb41-ffe1-4430-8caa-295111567d9e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.413641] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Created folder: Instances in parent group-v447824. [ 1260.413862] env[62627]: DEBUG oslo.service.loopingcall [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1260.414049] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5b001b38-d121-4e03-910e-54647536faf5] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1260.414247] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bc5d33da-9f31-44ed-ab86-dfa7cff72f11 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.432322] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1260.432322] env[62627]: value = "task-2195308" [ 1260.432322] env[62627]: _type = "Task" [ 1260.432322] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1260.439410] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195308, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1260.463880] env[62627]: DEBUG nova.compute.manager [req-0ff168c9-43bf-4bc8-98cc-cb35f116c9dc req-c4eb7dd1-556e-430b-9bd1-142cc45f4d5a service nova] [instance: 5b001b38-d121-4e03-910e-54647536faf5] Received event network-changed-8875fdf7-7fa0-4e9d-bfe4-be006d9c72f8 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1260.464102] env[62627]: DEBUG nova.compute.manager [req-0ff168c9-43bf-4bc8-98cc-cb35f116c9dc req-c4eb7dd1-556e-430b-9bd1-142cc45f4d5a service nova] [instance: 5b001b38-d121-4e03-910e-54647536faf5] Refreshing instance network info cache due to event network-changed-8875fdf7-7fa0-4e9d-bfe4-be006d9c72f8. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1260.464325] env[62627]: DEBUG oslo_concurrency.lockutils [req-0ff168c9-43bf-4bc8-98cc-cb35f116c9dc req-c4eb7dd1-556e-430b-9bd1-142cc45f4d5a service nova] Acquiring lock "refresh_cache-5b001b38-d121-4e03-910e-54647536faf5" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1260.464473] env[62627]: DEBUG oslo_concurrency.lockutils [req-0ff168c9-43bf-4bc8-98cc-cb35f116c9dc req-c4eb7dd1-556e-430b-9bd1-142cc45f4d5a service nova] Acquired lock "refresh_cache-5b001b38-d121-4e03-910e-54647536faf5" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1260.464629] env[62627]: DEBUG nova.network.neutron [req-0ff168c9-43bf-4bc8-98cc-cb35f116c9dc req-c4eb7dd1-556e-430b-9bd1-142cc45f4d5a service nova] [instance: 5b001b38-d121-4e03-910e-54647536faf5] Refreshing network info cache for port 8875fdf7-7fa0-4e9d-bfe4-be006d9c72f8 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1260.627642] env[62627]: DEBUG oslo_vmware.api [None req-efc5541c-c6c9-4037-9dd6-bf4918ed5b44 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195305, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.156433} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1260.628087] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-efc5541c-c6c9-4037-9dd6-bf4918ed5b44 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1260.628378] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-efc5541c-c6c9-4037-9dd6-bf4918ed5b44 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] Deleted contents of the VM from datastore datastore1 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1260.628599] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-efc5541c-c6c9-4037-9dd6-bf4918ed5b44 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1260.628818] env[62627]: INFO nova.compute.manager [None req-efc5541c-c6c9-4037-9dd6-bf4918ed5b44 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1260.629150] env[62627]: DEBUG oslo.service.loopingcall [None req-efc5541c-c6c9-4037-9dd6-bf4918ed5b44 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1260.629400] env[62627]: DEBUG nova.compute.manager [-] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1260.629526] env[62627]: DEBUG nova.network.neutron [-] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1260.708028] env[62627]: DEBUG oslo_vmware.api [None req-b34f588c-71fb-4709-8117-056c3342d80e tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195302, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1260.884516] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: c0e4ea33-2397-4716-8f6a-603fe099fdc5] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1260.889249] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b4d9d3d4-f83c-4b6e-a956-29a7b3b73f6d tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.748s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1260.914622] env[62627]: INFO nova.scheduler.client.report [None req-b4d9d3d4-f83c-4b6e-a956-29a7b3b73f6d tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Deleted allocations for instance f195662c-9493-469a-ba36-d712f9944b28 [ 1260.943502] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195308, 'name': CreateVM_Task, 'duration_secs': 0.319559} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1260.943675] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5b001b38-d121-4e03-910e-54647536faf5] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1260.944373] env[62627]: DEBUG oslo_concurrency.lockutils [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1260.944534] env[62627]: DEBUG oslo_concurrency.lockutils [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1260.944859] env[62627]: DEBUG oslo_concurrency.lockutils [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1260.945127] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a9ca487-cce0-4693-a852-d9f32044da9b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.949795] env[62627]: DEBUG oslo_vmware.api [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Waiting for the task: (returnval){ [ 1260.949795] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5242be7d-88e8-3454-fd16-960fbd1f308e" [ 1260.949795] env[62627]: _type = "Task" [ 1260.949795] 
env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1260.958338] env[62627]: DEBUG oslo_vmware.api [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5242be7d-88e8-3454-fd16-960fbd1f308e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1261.160476] env[62627]: DEBUG nova.compute.manager [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 7e340c99-8d15-4696-98d5-a952f5d2aeee] Start spawning the instance on the hypervisor. {{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1261.187532] env[62627]: DEBUG nova.network.neutron [req-0ff168c9-43bf-4bc8-98cc-cb35f116c9dc req-c4eb7dd1-556e-430b-9bd1-142cc45f4d5a service nova] [instance: 5b001b38-d121-4e03-910e-54647536faf5] Updated VIF entry in instance network info cache for port 8875fdf7-7fa0-4e9d-bfe4-be006d9c72f8. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1261.187532] env[62627]: DEBUG nova.network.neutron [req-0ff168c9-43bf-4bc8-98cc-cb35f116c9dc req-c4eb7dd1-556e-430b-9bd1-142cc45f4d5a service nova] [instance: 5b001b38-d121-4e03-910e-54647536faf5] Updating instance_info_cache with network_info: [{"id": "8875fdf7-7fa0-4e9d-bfe4-be006d9c72f8", "address": "fa:16:3e:c6:27:14", "network": {"id": "c638550c-4fb3-4d42-8d7e-e2722fddeece", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-309277593-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8baa28640bef4e45aebec251dbb31b1a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bed837fa-6b6a-4192-a229-a99426a46065", "external-id": "nsx-vlan-transportzone-954", "segmentation_id": 954, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8875fdf7-7f", "ovs_interfaceid": "8875fdf7-7fa0-4e9d-bfe4-be006d9c72f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1261.192287] env[62627]: DEBUG nova.virt.hardware [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1261.192529] env[62627]: DEBUG nova.virt.hardware [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1261.192702] env[62627]: DEBUG nova.virt.hardware [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1261.192896] env[62627]: DEBUG nova.virt.hardware [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1261.193062] env[62627]: DEBUG nova.virt.hardware [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1261.193239] env[62627]: DEBUG nova.virt.hardware [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1261.193533] env[62627]: DEBUG nova.virt.hardware [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1261.193629] env[62627]: DEBUG nova.virt.hardware [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1261.193806] env[62627]: DEBUG nova.virt.hardware [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1261.193970] env[62627]: DEBUG nova.virt.hardware [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 
1261.194170] env[62627]: DEBUG nova.virt.hardware [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1261.195057] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1e71fa0-c3ae-4a72-8c89-d68082e2b022 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.204991] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c0f340e-0fd7-48c1-a5cd-f6a2c1eb71de {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.213119] env[62627]: DEBUG oslo_vmware.api [None req-b34f588c-71fb-4709-8117-056c3342d80e tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195302, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1261.393279] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 9e0693d9-077a-4a02-b1c5-40fdddcdd27d] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1261.423063] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b4d9d3d4-f83c-4b6e-a956-29a7b3b73f6d tempest-ServerDiskConfigTestJSON-1565889928 tempest-ServerDiskConfigTestJSON-1565889928-project-member] Lock "f195662c-9493-469a-ba36-d712f9944b28" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.760s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1261.461143] env[62627]: DEBUG oslo_vmware.api [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5242be7d-88e8-3454-fd16-960fbd1f308e, 'name': SearchDatastore_Task, 'duration_secs': 0.009616} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1261.461681] env[62627]: DEBUG oslo_concurrency.lockutils [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1261.462193] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] [instance: 5b001b38-d121-4e03-910e-54647536faf5] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1261.462295] env[62627]: DEBUG oslo_concurrency.lockutils [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1261.462497] env[62627]: DEBUG oslo_concurrency.lockutils [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1261.462737] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1261.463084] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8234f10b-4d8e-4f87-9f01-013401779de5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.475890] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1261.476173] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1261.476930] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f888b07f-c4eb-4e97-831c-e3d0596c0430 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.482810] env[62627]: DEBUG oslo_vmware.api [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Waiting for the task: (returnval){ [ 1261.482810] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d6928e-64b1-d111-acb5-e644558c5759" [ 1261.482810] env[62627]: _type = "Task" [ 1261.482810] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1261.490984] env[62627]: DEBUG oslo_vmware.api [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d6928e-64b1-d111-acb5-e644558c5759, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1261.590449] env[62627]: DEBUG nova.network.neutron [-] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1261.597153] env[62627]: DEBUG nova.network.neutron [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 7e340c99-8d15-4696-98d5-a952f5d2aeee] Successfully updated port: 2cabec54-95a1-48db-b0ba-596ee7f4671e {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1261.689801] env[62627]: DEBUG oslo_concurrency.lockutils [req-0ff168c9-43bf-4bc8-98cc-cb35f116c9dc req-c4eb7dd1-556e-430b-9bd1-142cc45f4d5a service nova] Releasing lock "refresh_cache-5b001b38-d121-4e03-910e-54647536faf5" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1261.708719] env[62627]: DEBUG oslo_vmware.api [None req-b34f588c-71fb-4709-8117-056c3342d80e tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195302, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1261.896362] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 0ee04bf9-6aa6-485c-a979-191d52af236d] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1261.993923] env[62627]: DEBUG oslo_vmware.api [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d6928e-64b1-d111-acb5-e644558c5759, 'name': SearchDatastore_Task, 'duration_secs': 0.020206} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1261.994816] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-01100923-776b-4b5e-9664-c6185e2b9da4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.999954] env[62627]: DEBUG oslo_vmware.api [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Waiting for the task: (returnval){ [ 1261.999954] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5212cdb0-2fd9-697a-3b0b-1ccad946111c" [ 1261.999954] env[62627]: _type = "Task" [ 1261.999954] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1262.007801] env[62627]: DEBUG oslo_vmware.api [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5212cdb0-2fd9-697a-3b0b-1ccad946111c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1262.094928] env[62627]: INFO nova.compute.manager [-] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] Took 1.47 seconds to deallocate network for instance. [ 1262.099819] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquiring lock "refresh_cache-7e340c99-8d15-4696-98d5-a952f5d2aeee" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1262.099946] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquired lock "refresh_cache-7e340c99-8d15-4696-98d5-a952f5d2aeee" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1262.100100] env[62627]: DEBUG nova.network.neutron [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 7e340c99-8d15-4696-98d5-a952f5d2aeee] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1262.209842] env[62627]: DEBUG oslo_vmware.api [None req-b34f588c-71fb-4709-8117-056c3342d80e tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195302, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1262.399571] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: aca062be-755d-4e41-b9ed-f28d2d3bf598] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1262.493350] env[62627]: DEBUG nova.compute.manager [req-2925ad79-215f-4f5a-aa4c-0d0514d9172d req-81990e74-c63b-400c-9e26-4581f87a6539 service nova] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] Received event network-vif-deleted-9db88239-0a75-4952-8e09-6a6636c9ecb0 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1262.493598] env[62627]: DEBUG nova.compute.manager [req-2925ad79-215f-4f5a-aa4c-0d0514d9172d req-81990e74-c63b-400c-9e26-4581f87a6539 service nova] [instance: 7e340c99-8d15-4696-98d5-a952f5d2aeee] Received event network-vif-plugged-2cabec54-95a1-48db-b0ba-596ee7f4671e {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1262.493795] env[62627]: DEBUG oslo_concurrency.lockutils [req-2925ad79-215f-4f5a-aa4c-0d0514d9172d req-81990e74-c63b-400c-9e26-4581f87a6539 service nova] Acquiring lock "7e340c99-8d15-4696-98d5-a952f5d2aeee-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1262.494484] env[62627]: DEBUG oslo_concurrency.lockutils [req-2925ad79-215f-4f5a-aa4c-0d0514d9172d req-81990e74-c63b-400c-9e26-4581f87a6539 service nova] Lock "7e340c99-8d15-4696-98d5-a952f5d2aeee-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1262.494484] env[62627]: DEBUG oslo_concurrency.lockutils [req-2925ad79-215f-4f5a-aa4c-0d0514d9172d req-81990e74-c63b-400c-9e26-4581f87a6539 service nova] Lock "7e340c99-8d15-4696-98d5-a952f5d2aeee-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1262.494484] env[62627]: DEBUG nova.compute.manager [req-2925ad79-215f-4f5a-aa4c-0d0514d9172d req-81990e74-c63b-400c-9e26-4581f87a6539 service nova] [instance: 7e340c99-8d15-4696-98d5-a952f5d2aeee] No waiting events found dispatching network-vif-plugged-2cabec54-95a1-48db-b0ba-596ee7f4671e {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1262.494769] env[62627]: WARNING nova.compute.manager [req-2925ad79-215f-4f5a-aa4c-0d0514d9172d req-81990e74-c63b-400c-9e26-4581f87a6539 service nova] [instance: 7e340c99-8d15-4696-98d5-a952f5d2aeee] Received unexpected event network-vif-plugged-2cabec54-95a1-48db-b0ba-596ee7f4671e for instance with vm_state building and task_state spawning. 
[ 1262.494769] env[62627]: DEBUG nova.compute.manager [req-2925ad79-215f-4f5a-aa4c-0d0514d9172d req-81990e74-c63b-400c-9e26-4581f87a6539 service nova] [instance: 7e340c99-8d15-4696-98d5-a952f5d2aeee] Received event network-changed-2cabec54-95a1-48db-b0ba-596ee7f4671e {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1262.494924] env[62627]: DEBUG nova.compute.manager [req-2925ad79-215f-4f5a-aa4c-0d0514d9172d req-81990e74-c63b-400c-9e26-4581f87a6539 service nova] [instance: 7e340c99-8d15-4696-98d5-a952f5d2aeee] Refreshing instance network info cache due to event network-changed-2cabec54-95a1-48db-b0ba-596ee7f4671e. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1262.495105] env[62627]: DEBUG oslo_concurrency.lockutils [req-2925ad79-215f-4f5a-aa4c-0d0514d9172d req-81990e74-c63b-400c-9e26-4581f87a6539 service nova] Acquiring lock "refresh_cache-7e340c99-8d15-4696-98d5-a952f5d2aeee" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1262.511912] env[62627]: DEBUG oslo_vmware.api [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5212cdb0-2fd9-697a-3b0b-1ccad946111c, 'name': SearchDatastore_Task, 'duration_secs': 0.008693} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1262.512165] env[62627]: DEBUG oslo_concurrency.lockutils [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1262.512400] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 5b001b38-d121-4e03-910e-54647536faf5/5b001b38-d121-4e03-910e-54647536faf5.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1262.512858] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7608cdfb-c5b7-479b-87cc-d390d5212d1c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.522194] env[62627]: DEBUG oslo_vmware.api [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Waiting for the task: (returnval){ [ 1262.522194] env[62627]: value = "task-2195309" [ 1262.522194] env[62627]: _type = "Task" [ 1262.522194] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1262.530697] env[62627]: DEBUG oslo_vmware.api [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Task: {'id': task-2195309, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1262.603206] env[62627]: DEBUG oslo_concurrency.lockutils [None req-efc5541c-c6c9-4037-9dd6-bf4918ed5b44 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1262.603496] env[62627]: DEBUG oslo_concurrency.lockutils [None req-efc5541c-c6c9-4037-9dd6-bf4918ed5b44 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1262.603772] env[62627]: DEBUG nova.objects.instance [None req-efc5541c-c6c9-4037-9dd6-bf4918ed5b44 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lazy-loading 'resources' on Instance uuid 3001147e-7be3-4f72-b330-8f99bf7cf3ea {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1262.639746] env[62627]: DEBUG nova.network.neutron [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 7e340c99-8d15-4696-98d5-a952f5d2aeee] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1262.712138] env[62627]: DEBUG oslo_vmware.api [None req-b34f588c-71fb-4709-8117-056c3342d80e tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195302, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1262.796121] env[62627]: DEBUG nova.network.neutron [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 7e340c99-8d15-4696-98d5-a952f5d2aeee] Updating instance_info_cache with network_info: [{"id": "2cabec54-95a1-48db-b0ba-596ee7f4671e", "address": "fa:16:3e:cb:3f:26", "network": {"id": "1e37275e-31eb-4a79-8ff1-1b105d16fe3d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-59116622-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7a97f57bdf4a46db80460b1d5c9f9b9f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f499bc9-78da-46c1-9274-19edf26d31cb", "external-id": "nsx-vlan-transportzone-243", "segmentation_id": 243, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2cabec54-95", "ovs_interfaceid": "2cabec54-95a1-48db-b0ba-596ee7f4671e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1262.903212] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 78307469-ebbe-45bc-a146-eaa298e9b707] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1263.033548] env[62627]: DEBUG oslo_vmware.api [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Task: {'id': task-2195309, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.512143} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1263.033830] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 5b001b38-d121-4e03-910e-54647536faf5/5b001b38-d121-4e03-910e-54647536faf5.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1263.034054] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] [instance: 5b001b38-d121-4e03-910e-54647536faf5] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1263.034312] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a14b27b9-e49e-4f39-9ac4-712180dee7b5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.040180] env[62627]: DEBUG oslo_vmware.api [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Waiting for the task: (returnval){ [ 1263.040180] env[62627]: value = "task-2195310" [ 1263.040180] env[62627]: _type = "Task" [ 1263.040180] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1263.048182] env[62627]: DEBUG oslo_vmware.api [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Task: {'id': task-2195310, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1263.214944] env[62627]: DEBUG oslo_vmware.api [None req-b34f588c-71fb-4709-8117-056c3342d80e tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195302, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1263.298807] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Releasing lock "refresh_cache-7e340c99-8d15-4696-98d5-a952f5d2aeee" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1263.299308] env[62627]: DEBUG nova.compute.manager [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 7e340c99-8d15-4696-98d5-a952f5d2aeee] Instance network_info: |[{"id": "2cabec54-95a1-48db-b0ba-596ee7f4671e", "address": "fa:16:3e:cb:3f:26", "network": {"id": "1e37275e-31eb-4a79-8ff1-1b105d16fe3d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-59116622-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7a97f57bdf4a46db80460b1d5c9f9b9f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f499bc9-78da-46c1-9274-19edf26d31cb", "external-id": "nsx-vlan-transportzone-243", "segmentation_id": 243, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2cabec54-95", "ovs_interfaceid": "2cabec54-95a1-48db-b0ba-596ee7f4671e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1263.299513] env[62627]: DEBUG oslo_concurrency.lockutils [req-2925ad79-215f-4f5a-aa4c-0d0514d9172d req-81990e74-c63b-400c-9e26-4581f87a6539 service nova] Acquired lock "refresh_cache-7e340c99-8d15-4696-98d5-a952f5d2aeee" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1263.299813] env[62627]: DEBUG nova.network.neutron [req-2925ad79-215f-4f5a-aa4c-0d0514d9172d req-81990e74-c63b-400c-9e26-4581f87a6539 service nova] [instance: 7e340c99-8d15-4696-98d5-a952f5d2aeee] Refreshing network info cache for port 2cabec54-95a1-48db-b0ba-596ee7f4671e {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1263.301253] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 7e340c99-8d15-4696-98d5-a952f5d2aeee] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cb:3f:26', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3f499bc9-78da-46c1-9274-19edf26d31cb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2cabec54-95a1-48db-b0ba-596ee7f4671e', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1263.309350] env[62627]: DEBUG oslo.service.loopingcall [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 
tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1263.312186] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7e340c99-8d15-4696-98d5-a952f5d2aeee] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1263.313242] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7e996558-04f9-403b-9aab-89197fcc9484 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.337819] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1263.337819] env[62627]: value = "task-2195311" [ 1263.337819] env[62627]: _type = "Task" [ 1263.337819] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1263.349956] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195311, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1263.406273] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: e71794b9-22eb-4643-8587-f12a198024a4] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1263.411624] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38b845e5-655c-4c3a-b45a-ba7203e149f6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.419365] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41e89366-184f-485c-8b5a-dd52c7a6cb8c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.452321] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7a6b737-7df3-4e3f-a6ff-744fc0b80ed8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.461196] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01a99814-7b8b-47ad-9d2f-c582ab1fd185 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.476336] env[62627]: DEBUG nova.compute.provider_tree [None req-efc5541c-c6c9-4037-9dd6-bf4918ed5b44 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1263.552238] env[62627]: DEBUG oslo_vmware.api [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Task: {'id': task-2195310, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066214} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1263.552529] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] [instance: 5b001b38-d121-4e03-910e-54647536faf5] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1263.553387] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c5b4f20-efca-41c1-a7d4-2a8d25b5bd96 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.576313] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] [instance: 5b001b38-d121-4e03-910e-54647536faf5] Reconfiguring VM instance instance-00000069 to attach disk [datastore2] 5b001b38-d121-4e03-910e-54647536faf5/5b001b38-d121-4e03-910e-54647536faf5.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1263.576610] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0c60acc2-cab0-4434-b868-206917c39f6a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.597787] env[62627]: DEBUG oslo_vmware.api [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Waiting for the task: (returnval){ [ 1263.597787] env[62627]: value = "task-2195312" [ 1263.597787] env[62627]: _type = "Task" [ 1263.597787] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1263.605933] env[62627]: DEBUG oslo_vmware.api [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Task: {'id': task-2195312, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1263.711917] env[62627]: DEBUG oslo_vmware.api [None req-b34f588c-71fb-4709-8117-056c3342d80e tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195302, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1263.850680] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195311, 'name': CreateVM_Task, 'duration_secs': 0.363541} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1263.850880] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7e340c99-8d15-4696-98d5-a952f5d2aeee] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1263.851545] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1263.851707] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1263.852030] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1263.852285] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0df7c61e-5014-4891-960c-6d6bdf31ed24 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.856933] env[62627]: DEBUG oslo_vmware.api [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1263.856933] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5260b8b4-941b-1bda-ec69-2a13095142ee" [ 1263.856933] env[62627]: _type = "Task" [ 1263.856933] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1263.864469] env[62627]: DEBUG oslo_vmware.api [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5260b8b4-941b-1bda-ec69-2a13095142ee, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1263.911209] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 3e449c2b-b8e4-4f4e-885e-3bbf0a4d19a7] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1263.980391] env[62627]: DEBUG nova.scheduler.client.report [None req-efc5541c-c6c9-4037-9dd6-bf4918ed5b44 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1264.014295] env[62627]: DEBUG nova.network.neutron [req-2925ad79-215f-4f5a-aa4c-0d0514d9172d req-81990e74-c63b-400c-9e26-4581f87a6539 service nova] [instance: 7e340c99-8d15-4696-98d5-a952f5d2aeee] Updated VIF entry in instance network info cache for port 2cabec54-95a1-48db-b0ba-596ee7f4671e. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1264.014621] env[62627]: DEBUG nova.network.neutron [req-2925ad79-215f-4f5a-aa4c-0d0514d9172d req-81990e74-c63b-400c-9e26-4581f87a6539 service nova] [instance: 7e340c99-8d15-4696-98d5-a952f5d2aeee] Updating instance_info_cache with network_info: [{"id": "2cabec54-95a1-48db-b0ba-596ee7f4671e", "address": "fa:16:3e:cb:3f:26", "network": {"id": "1e37275e-31eb-4a79-8ff1-1b105d16fe3d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-59116622-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7a97f57bdf4a46db80460b1d5c9f9b9f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f499bc9-78da-46c1-9274-19edf26d31cb", "external-id": "nsx-vlan-transportzone-243", "segmentation_id": 243, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2cabec54-95", "ovs_interfaceid": "2cabec54-95a1-48db-b0ba-596ee7f4671e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1264.108080] env[62627]: DEBUG oslo_vmware.api [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Task: {'id': task-2195312, 'name': ReconfigVM_Task, 'duration_secs': 0.290115} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1264.108396] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] [instance: 5b001b38-d121-4e03-910e-54647536faf5] Reconfigured VM instance instance-00000069 to attach disk [datastore2] 5b001b38-d121-4e03-910e-54647536faf5/5b001b38-d121-4e03-910e-54647536faf5.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1264.109047] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5b5a3787-8798-40a2-9500-9102db4c4cdd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.115172] env[62627]: DEBUG oslo_vmware.api [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Waiting for the task: (returnval){ [ 1264.115172] env[62627]: value = "task-2195313" [ 1264.115172] env[62627]: _type = "Task" [ 1264.115172] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1264.122665] env[62627]: DEBUG oslo_vmware.api [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Task: {'id': task-2195313, 'name': Rename_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1264.213199] env[62627]: DEBUG oslo_vmware.api [None req-b34f588c-71fb-4709-8117-056c3342d80e tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195302, 'name': ReconfigVM_Task, 'duration_secs': 5.755348} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1264.213552] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b34f588c-71fb-4709-8117-056c3342d80e tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Releasing lock "1109aae0-62d2-4c55-8c68-f224f3de8b55" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1264.213853] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-b34f588c-71fb-4709-8117-056c3342d80e tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Reconfigured VM to detach interface {{(pid=62627) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1264.368452] env[62627]: DEBUG oslo_vmware.api [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5260b8b4-941b-1bda-ec69-2a13095142ee, 'name': SearchDatastore_Task, 'duration_secs': 0.010382} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1264.368767] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1264.369040] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 7e340c99-8d15-4696-98d5-a952f5d2aeee] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1264.369297] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1264.369465] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1264.369643] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1264.369966] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-72412d90-938b-4c55-9706-7f2c4da3bcf0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.378470] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1264.378641] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1264.379405] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f8235b9f-e900-4ba6-8e15-b5e769c72753 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.384617] env[62627]: DEBUG oslo_vmware.api [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1264.384617] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52e47f80-3df3-e326-33bd-a3fee0bd9e7a" [ 1264.384617] env[62627]: _type = "Task" [ 1264.384617] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1264.392145] env[62627]: DEBUG oslo_vmware.api [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52e47f80-3df3-e326-33bd-a3fee0bd9e7a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1264.413922] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 5edf4b5b-93b1-4efc-80ec-f3a7009d5449] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1264.488285] env[62627]: DEBUG oslo_concurrency.lockutils [None req-efc5541c-c6c9-4037-9dd6-bf4918ed5b44 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.885s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1264.511046] env[62627]: INFO nova.scheduler.client.report [None req-efc5541c-c6c9-4037-9dd6-bf4918ed5b44 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Deleted allocations for instance 3001147e-7be3-4f72-b330-8f99bf7cf3ea [ 1264.517173] env[62627]: DEBUG oslo_concurrency.lockutils [req-2925ad79-215f-4f5a-aa4c-0d0514d9172d req-81990e74-c63b-400c-9e26-4581f87a6539 service nova] Releasing lock "refresh_cache-7e340c99-8d15-4696-98d5-a952f5d2aeee" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1264.625502] env[62627]: DEBUG oslo_vmware.api [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Task: {'id': task-2195313, 'name': Rename_Task, 'duration_secs': 0.129676} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1264.625782] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] [instance: 5b001b38-d121-4e03-910e-54647536faf5] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1264.626037] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dfdd73fb-a259-4b14-8ff4-a08bf904c892 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.632863] env[62627]: DEBUG oslo_vmware.api [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Waiting for the task: (returnval){ [ 1264.632863] env[62627]: value = "task-2195314" [ 1264.632863] env[62627]: _type = "Task" [ 1264.632863] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1264.640236] env[62627]: DEBUG oslo_vmware.api [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Task: {'id': task-2195314, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1264.895299] env[62627]: DEBUG oslo_vmware.api [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52e47f80-3df3-e326-33bd-a3fee0bd9e7a, 'name': SearchDatastore_Task, 'duration_secs': 0.008316} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1264.896095] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-777a5df1-de50-4326-9c1e-66829637b120 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.901359] env[62627]: DEBUG oslo_vmware.api [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1264.901359] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52c81620-058b-4786-d0fd-291946384ee7" [ 1264.901359] env[62627]: _type = "Task" [ 1264.901359] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1264.909105] env[62627]: DEBUG oslo_vmware.api [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52c81620-058b-4786-d0fd-291946384ee7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1264.917890] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 61f54696-1ce2-4515-ab9c-72347dbd9de0] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1265.018719] env[62627]: DEBUG oslo_concurrency.lockutils [None req-efc5541c-c6c9-4037-9dd6-bf4918ed5b44 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lock "3001147e-7be3-4f72-b330-8f99bf7cf3ea" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.037s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1265.142600] env[62627]: DEBUG oslo_vmware.api [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Task: {'id': task-2195314, 'name': PowerOnVM_Task, 'duration_secs': 0.441587} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1265.142859] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] [instance: 5b001b38-d121-4e03-910e-54647536faf5] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1265.143070] env[62627]: INFO nova.compute.manager [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] [instance: 5b001b38-d121-4e03-910e-54647536faf5] Took 7.19 seconds to spawn the instance on the hypervisor. [ 1265.143254] env[62627]: DEBUG nova.compute.manager [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] [instance: 5b001b38-d121-4e03-910e-54647536faf5] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1265.144012] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-682d9abc-c9d0-444b-8645-412fc4f71f2f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.411620] env[62627]: DEBUG oslo_vmware.api [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52c81620-058b-4786-d0fd-291946384ee7, 'name': SearchDatastore_Task, 'duration_secs': 0.018867} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1265.411837] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1265.412117] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 7e340c99-8d15-4696-98d5-a952f5d2aeee/7e340c99-8d15-4696-98d5-a952f5d2aeee.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1265.412437] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-589dfb4f-8e1f-4855-93d7-8f23eac9dbe3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.419779] env[62627]: DEBUG oslo_vmware.api [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1265.419779] env[62627]: value = "task-2195315" [ 1265.419779] env[62627]: _type = "Task" [ 1265.419779] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1265.423020] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: e5a1fa80-6579-4334-9705-33a95dbc3797] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1265.429527] env[62627]: DEBUG oslo_vmware.api [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195315, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1265.540717] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b34f588c-71fb-4709-8117-056c3342d80e tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquiring lock "refresh_cache-1109aae0-62d2-4c55-8c68-f224f3de8b55" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1265.540847] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b34f588c-71fb-4709-8117-056c3342d80e tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquired lock "refresh_cache-1109aae0-62d2-4c55-8c68-f224f3de8b55" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1265.540964] env[62627]: DEBUG nova.network.neutron [None req-b34f588c-71fb-4709-8117-056c3342d80e tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1265.665248] env[62627]: INFO nova.compute.manager [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] [instance: 5b001b38-d121-4e03-910e-54647536faf5] Took 11.99 seconds to build instance. [ 1265.928051] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 513c755a-424a-4e47-887e-7b315ad2e583] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1265.935996] env[62627]: DEBUG oslo_vmware.api [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195315, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1266.055055] env[62627]: DEBUG oslo_concurrency.lockutils [None req-18aec2fb-1c67-4d5b-b824-9ec0bae4fc1d tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquiring lock "1109aae0-62d2-4c55-8c68-f224f3de8b55" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1266.055337] env[62627]: DEBUG oslo_concurrency.lockutils [None req-18aec2fb-1c67-4d5b-b824-9ec0bae4fc1d tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lock "1109aae0-62d2-4c55-8c68-f224f3de8b55" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1266.055569] env[62627]: DEBUG oslo_concurrency.lockutils [None req-18aec2fb-1c67-4d5b-b824-9ec0bae4fc1d tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquiring lock "1109aae0-62d2-4c55-8c68-f224f3de8b55-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1266.055726] env[62627]: DEBUG oslo_concurrency.lockutils [None req-18aec2fb-1c67-4d5b-b824-9ec0bae4fc1d tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lock "1109aae0-62d2-4c55-8c68-f224f3de8b55-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1266.055920] env[62627]: DEBUG oslo_concurrency.lockutils [None req-18aec2fb-1c67-4d5b-b824-9ec0bae4fc1d tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lock "1109aae0-62d2-4c55-8c68-f224f3de8b55-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1266.058244] env[62627]: INFO nova.compute.manager [None req-18aec2fb-1c67-4d5b-b824-9ec0bae4fc1d tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Terminating instance [ 1266.158735] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8297deaa-8799-47ac-8265-2d0a53d43d76 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Acquiring lock "5b001b38-d121-4e03-910e-54647536faf5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1266.167332] env[62627]: DEBUG oslo_concurrency.lockutils [None req-06a8e58b-7051-4e98-8e62-a61d7bc86591 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Lock "5b001b38-d121-4e03-910e-54647536faf5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 
13.502s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1266.167572] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8297deaa-8799-47ac-8265-2d0a53d43d76 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Lock "5b001b38-d121-4e03-910e-54647536faf5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.009s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1266.167782] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8297deaa-8799-47ac-8265-2d0a53d43d76 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Acquiring lock "5b001b38-d121-4e03-910e-54647536faf5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1266.167979] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8297deaa-8799-47ac-8265-2d0a53d43d76 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Lock "5b001b38-d121-4e03-910e-54647536faf5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1266.168162] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8297deaa-8799-47ac-8265-2d0a53d43d76 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Lock "5b001b38-d121-4e03-910e-54647536faf5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1266.170337] env[62627]: INFO nova.compute.manager [None req-8297deaa-8799-47ac-8265-2d0a53d43d76 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] [instance: 5b001b38-d121-4e03-910e-54647536faf5] Terminating instance [ 1266.427036] env[62627]: INFO nova.network.neutron [None req-b34f588c-71fb-4709-8117-056c3342d80e tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Port 1034439a-d24f-4eb6-8139-67b012254146 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 1266.427406] env[62627]: DEBUG nova.network.neutron [None req-b34f588c-71fb-4709-8117-056c3342d80e tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Updating instance_info_cache with network_info: [{"id": "3166f86b-c852-4335-9c18-b4c68f51d8ab", "address": "fa:16:3e:7b:bd:70", "network": {"id": "a1c5c227-8f1f-4881-a6d8-f446b64d9846", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1052922661-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.196", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e4f7ad3ebcf4c4d97bdbce58eec5a09", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "023d6500-887e-4dc4-bec5-06b40450d9c0", "external-id": "nsx-vlan-transportzone-108", "segmentation_id": 108, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3166f86b-c8", "ovs_interfaceid": "3166f86b-c852-4335-9c18-b4c68f51d8ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1266.434369] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: e263159d-e9ff-4a19-a29a-cb06eaf2d679] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1266.436065] env[62627]: DEBUG oslo_vmware.api [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195315, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.544542} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1266.436334] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 7e340c99-8d15-4696-98d5-a952f5d2aeee/7e340c99-8d15-4696-98d5-a952f5d2aeee.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1266.436564] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 7e340c99-8d15-4696-98d5-a952f5d2aeee] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1266.436992] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bcc90677-01ea-44ff-b501-ad69b9047f92 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.443910] env[62627]: DEBUG oslo_vmware.api [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1266.443910] env[62627]: value = "task-2195316" [ 1266.443910] env[62627]: _type = "Task" [ 1266.443910] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1266.452574] env[62627]: DEBUG oslo_vmware.api [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195316, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1266.565764] env[62627]: DEBUG nova.compute.manager [None req-18aec2fb-1c67-4d5b-b824-9ec0bae4fc1d tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Start destroying the instance on the hypervisor. 
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1266.565948] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-18aec2fb-1c67-4d5b-b824-9ec0bae4fc1d tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1266.566921] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12902201-859e-45f3-b4fa-2bc2409aceb4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.575143] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-18aec2fb-1c67-4d5b-b824-9ec0bae4fc1d tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1266.575374] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ddf4b1b5-6c41-4f4e-ac5d-5660b77b3f07 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.582103] env[62627]: DEBUG oslo_vmware.api [None req-18aec2fb-1c67-4d5b-b824-9ec0bae4fc1d tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Waiting for the task: (returnval){ [ 1266.582103] env[62627]: value = "task-2195317" [ 1266.582103] env[62627]: _type = "Task" [ 1266.582103] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1266.589608] env[62627]: DEBUG oslo_vmware.api [None req-18aec2fb-1c67-4d5b-b824-9ec0bae4fc1d tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195317, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1266.674378] env[62627]: DEBUG nova.compute.manager [None req-8297deaa-8799-47ac-8265-2d0a53d43d76 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] [instance: 5b001b38-d121-4e03-910e-54647536faf5] Start destroying the instance on the hypervisor. 
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1266.674497] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8297deaa-8799-47ac-8265-2d0a53d43d76 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] [instance: 5b001b38-d121-4e03-910e-54647536faf5] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1266.675402] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-746b91f9-eb1b-4461-baf8-f0b41ec93a85 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.684492] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-8297deaa-8799-47ac-8265-2d0a53d43d76 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] [instance: 5b001b38-d121-4e03-910e-54647536faf5] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1266.685177] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-308b5451-de81-4d97-9f62-b3d804cdd293 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.693813] env[62627]: DEBUG oslo_vmware.api [None req-8297deaa-8799-47ac-8265-2d0a53d43d76 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Waiting for the task: (returnval){ [ 1266.693813] env[62627]: value = "task-2195318" [ 1266.693813] env[62627]: _type = "Task" [ 1266.693813] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1266.702545] env[62627]: DEBUG oslo_vmware.api [None req-8297deaa-8799-47ac-8265-2d0a53d43d76 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Task: {'id': task-2195318, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1266.930433] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b34f588c-71fb-4709-8117-056c3342d80e tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Releasing lock "refresh_cache-1109aae0-62d2-4c55-8c68-f224f3de8b55" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1266.938328] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: f8003eee-248c-4005-8465-6e3e96f98f88] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1266.954585] env[62627]: DEBUG oslo_vmware.api [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195316, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064584} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1266.954829] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 7e340c99-8d15-4696-98d5-a952f5d2aeee] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1266.955609] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0131be3-d77b-4a9b-b05e-9fef71ca2b16 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.977664] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 7e340c99-8d15-4696-98d5-a952f5d2aeee] Reconfiguring VM instance instance-0000006a to attach disk [datastore2] 7e340c99-8d15-4696-98d5-a952f5d2aeee/7e340c99-8d15-4696-98d5-a952f5d2aeee.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1266.978114] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-22bd2b9b-7a8b-4609-904f-5fee8ef7fa57 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.997493] env[62627]: DEBUG oslo_vmware.api [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1266.997493] env[62627]: value = "task-2195319" [ 1266.997493] env[62627]: _type = "Task" [ 1266.997493] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1267.005655] env[62627]: DEBUG oslo_vmware.api [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195319, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1267.093238] env[62627]: DEBUG oslo_vmware.api [None req-18aec2fb-1c67-4d5b-b824-9ec0bae4fc1d tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195317, 'name': PowerOffVM_Task, 'duration_secs': 0.210322} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1267.093499] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-18aec2fb-1c67-4d5b-b824-9ec0bae4fc1d tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1267.093708] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-18aec2fb-1c67-4d5b-b824-9ec0bae4fc1d tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1267.093972] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e1dfa6ae-f97d-4c0e-b86f-2ea42e349671 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.157815] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-18aec2fb-1c67-4d5b-b824-9ec0bae4fc1d tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1267.158092] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-18aec2fb-1c67-4d5b-b824-9ec0bae4fc1d tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1267.158265] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-18aec2fb-1c67-4d5b-b824-9ec0bae4fc1d tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Deleting the datastore file [datastore2] 1109aae0-62d2-4c55-8c68-f224f3de8b55 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1267.158630] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b1c6bc6c-c5c5-4ffc-b115-8c89229f8f4a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.166285] env[62627]: DEBUG oslo_vmware.api [None req-18aec2fb-1c67-4d5b-b824-9ec0bae4fc1d tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Waiting for the task: (returnval){ [ 1267.166285] env[62627]: value = "task-2195321" [ 1267.166285] env[62627]: _type = "Task" [ 1267.166285] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1267.175454] env[62627]: DEBUG oslo_vmware.api [None req-18aec2fb-1c67-4d5b-b824-9ec0bae4fc1d tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195321, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1267.203237] env[62627]: DEBUG oslo_vmware.api [None req-8297deaa-8799-47ac-8265-2d0a53d43d76 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Task: {'id': task-2195318, 'name': PowerOffVM_Task, 'duration_secs': 0.194067} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1267.203557] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-8297deaa-8799-47ac-8265-2d0a53d43d76 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] [instance: 5b001b38-d121-4e03-910e-54647536faf5] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1267.203792] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8297deaa-8799-47ac-8265-2d0a53d43d76 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] [instance: 5b001b38-d121-4e03-910e-54647536faf5] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1267.204235] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1ea78184-458f-49ca-a90f-45ab09620a5e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.273786] env[62627]: DEBUG oslo_concurrency.lockutils [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Acquiring lock "cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1267.274221] env[62627]: DEBUG oslo_concurrency.lockutils [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lock "cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1267.275693] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8297deaa-8799-47ac-8265-2d0a53d43d76 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] [instance: 5b001b38-d121-4e03-910e-54647536faf5] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1267.275974] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8297deaa-8799-47ac-8265-2d0a53d43d76 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] [instance: 5b001b38-d121-4e03-910e-54647536faf5] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1267.276248] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-8297deaa-8799-47ac-8265-2d0a53d43d76 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Deleting the datastore file [datastore2] 
5b001b38-d121-4e03-910e-54647536faf5 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1267.277050] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d6cd9944-9440-4003-b0fe-9bc3b70a9b34 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.284341] env[62627]: DEBUG oslo_vmware.api [None req-8297deaa-8799-47ac-8265-2d0a53d43d76 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Waiting for the task: (returnval){ [ 1267.284341] env[62627]: value = "task-2195323" [ 1267.284341] env[62627]: _type = "Task" [ 1267.284341] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1267.296075] env[62627]: DEBUG oslo_vmware.api [None req-8297deaa-8799-47ac-8265-2d0a53d43d76 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Task: {'id': task-2195323, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1267.434405] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b34f588c-71fb-4709-8117-056c3342d80e tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lock "interface-1109aae0-62d2-4c55-8c68-f224f3de8b55-1034439a-d24f-4eb6-8139-67b012254146" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.809s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1267.441530] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 16ac82e3-b9e3-4c34-bb8c-2989cc33246f] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1267.507142] env[62627]: DEBUG oslo_vmware.api [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195319, 'name': ReconfigVM_Task, 'duration_secs': 0.263547} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1267.507436] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 7e340c99-8d15-4696-98d5-a952f5d2aeee] Reconfigured VM instance instance-0000006a to attach disk [datastore2] 7e340c99-8d15-4696-98d5-a952f5d2aeee/7e340c99-8d15-4696-98d5-a952f5d2aeee.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1267.508112] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-addee231-5d16-4c64-94ee-d1a265cdd91e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.515058] env[62627]: DEBUG oslo_vmware.api [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1267.515058] env[62627]: value = "task-2195324" [ 1267.515058] env[62627]: _type = "Task" [ 1267.515058] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1267.523162] env[62627]: DEBUG oslo_vmware.api [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195324, 'name': Rename_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1267.676515] env[62627]: DEBUG oslo_vmware.api [None req-18aec2fb-1c67-4d5b-b824-9ec0bae4fc1d tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195321, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.311302} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1267.676746] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-18aec2fb-1c67-4d5b-b824-9ec0bae4fc1d tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1267.676905] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-18aec2fb-1c67-4d5b-b824-9ec0bae4fc1d tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1267.677097] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-18aec2fb-1c67-4d5b-b824-9ec0bae4fc1d tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1267.677273] env[62627]: INFO nova.compute.manager [None req-18aec2fb-1c67-4d5b-b824-9ec0bae4fc1d tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Took 1.11 seconds to destroy the instance on the hypervisor. 
[ 1267.677541] env[62627]: DEBUG oslo.service.loopingcall [None req-18aec2fb-1c67-4d5b-b824-9ec0bae4fc1d tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1267.677755] env[62627]: DEBUG nova.compute.manager [-] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1267.677854] env[62627]: DEBUG nova.network.neutron [-] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1267.777815] env[62627]: DEBUG nova.compute.manager [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1267.796576] env[62627]: DEBUG oslo_vmware.api [None req-8297deaa-8799-47ac-8265-2d0a53d43d76 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Task: {'id': task-2195323, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.213734} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1267.796576] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-8297deaa-8799-47ac-8265-2d0a53d43d76 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1267.796576] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8297deaa-8799-47ac-8265-2d0a53d43d76 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] [instance: 5b001b38-d121-4e03-910e-54647536faf5] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1267.796576] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8297deaa-8799-47ac-8265-2d0a53d43d76 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] [instance: 5b001b38-d121-4e03-910e-54647536faf5] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1267.796576] env[62627]: INFO nova.compute.manager [None req-8297deaa-8799-47ac-8265-2d0a53d43d76 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] [instance: 5b001b38-d121-4e03-910e-54647536faf5] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1267.796576] env[62627]: DEBUG oslo.service.loopingcall [None req-8297deaa-8799-47ac-8265-2d0a53d43d76 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1267.796912] env[62627]: DEBUG nova.compute.manager [-] [instance: 5b001b38-d121-4e03-910e-54647536faf5] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1267.796912] env[62627]: DEBUG nova.network.neutron [-] [instance: 5b001b38-d121-4e03-910e-54647536faf5] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1267.945705] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: bd583f6b-be8d-4f02-a67e-0ea10e6dd087] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1268.026287] env[62627]: DEBUG oslo_vmware.api [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195324, 'name': Rename_Task, 'duration_secs': 0.151525} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1268.026287] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 7e340c99-8d15-4696-98d5-a952f5d2aeee] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1268.027093] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-39a983d6-13ca-4517-9ec8-34603fccf64b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.037161] env[62627]: DEBUG oslo_vmware.api [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1268.037161] env[62627]: value = "task-2195325" [ 1268.037161] env[62627]: _type = "Task" [ 1268.037161] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1268.045734] env[62627]: DEBUG oslo_vmware.api [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195325, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1268.084851] env[62627]: DEBUG nova.compute.manager [req-88e93a9c-f1a1-48bf-8646-5ba80a0f3895 req-ad7f8443-20b5-4654-869e-c36169e0d295 service nova] [instance: 5b001b38-d121-4e03-910e-54647536faf5] Received event network-vif-deleted-8875fdf7-7fa0-4e9d-bfe4-be006d9c72f8 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1268.084851] env[62627]: INFO nova.compute.manager [req-88e93a9c-f1a1-48bf-8646-5ba80a0f3895 req-ad7f8443-20b5-4654-869e-c36169e0d295 service nova] [instance: 5b001b38-d121-4e03-910e-54647536faf5] Neutron deleted interface 8875fdf7-7fa0-4e9d-bfe4-be006d9c72f8; detaching it from the instance and deleting it from the info cache [ 1268.084851] env[62627]: DEBUG nova.network.neutron [req-88e93a9c-f1a1-48bf-8646-5ba80a0f3895 req-ad7f8443-20b5-4654-869e-c36169e0d295 service nova] [instance: 5b001b38-d121-4e03-910e-54647536faf5] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1268.303306] env[62627]: DEBUG oslo_concurrency.lockutils [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1268.303788] env[62627]: DEBUG oslo_concurrency.lockutils [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1268.305152] env[62627]: INFO nova.compute.claims [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1268.450120] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 58511c0c-1cb0-4ba9-8ca9-3d2e5ca5db37] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1268.453612] env[62627]: DEBUG nova.compute.manager [req-be83a3ff-f5fa-4fef-bbce-58b9ec16bcb6 req-10404406-9b9f-4ca7-8e11-50ce0d7c11ea service nova] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Received event network-vif-deleted-3166f86b-c852-4335-9c18-b4c68f51d8ab {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1268.453807] env[62627]: INFO nova.compute.manager [req-be83a3ff-f5fa-4fef-bbce-58b9ec16bcb6 req-10404406-9b9f-4ca7-8e11-50ce0d7c11ea service nova] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Neutron deleted interface 3166f86b-c852-4335-9c18-b4c68f51d8ab; detaching it from the instance and deleting it from the info cache [ 1268.453969] env[62627]: DEBUG nova.network.neutron [req-be83a3ff-f5fa-4fef-bbce-58b9ec16bcb6 req-10404406-9b9f-4ca7-8e11-50ce0d7c11ea service nova] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Updating instance_info_cache with network_info: [] {{(pid=62627) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1268.547691] env[62627]: DEBUG oslo_vmware.api [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195325, 'name': PowerOnVM_Task, 'duration_secs': 0.465056} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1268.547973] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 7e340c99-8d15-4696-98d5-a952f5d2aeee] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1268.548188] env[62627]: INFO nova.compute.manager [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 7e340c99-8d15-4696-98d5-a952f5d2aeee] Took 7.39 seconds to spawn the instance on the hypervisor. [ 1268.548421] env[62627]: DEBUG nova.compute.manager [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 7e340c99-8d15-4696-98d5-a952f5d2aeee] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1268.549201] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5395acee-4a90-4baf-a1f7-6aa75687721a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.562630] env[62627]: DEBUG nova.network.neutron [-] [instance: 5b001b38-d121-4e03-910e-54647536faf5] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1268.587941] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fbb66d24-2508-42a8-ba73-89cc00a1986b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.599587] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61f93921-dcef-412f-b8a5-0370cf58efdf {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.628625] env[62627]: DEBUG nova.compute.manager [req-88e93a9c-f1a1-48bf-8646-5ba80a0f3895 req-ad7f8443-20b5-4654-869e-c36169e0d295 service nova] [instance: 5b001b38-d121-4e03-910e-54647536faf5] Detach interface failed, port_id=8875fdf7-7fa0-4e9d-bfe4-be006d9c72f8, reason: Instance 5b001b38-d121-4e03-910e-54647536faf5 could not be found. 
{{(pid=62627) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1268.937618] env[62627]: DEBUG nova.network.neutron [-] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1268.955780] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 7184a7d2-72c6-48f2-b125-880f1f9858c7] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1268.958034] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-35f36295-1954-4f07-925e-337ad996f5da {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.968162] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcce1f3c-f7b2-4ec4-a4b3-03ca91ae3375 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.997891] env[62627]: DEBUG nova.compute.manager [req-be83a3ff-f5fa-4fef-bbce-58b9ec16bcb6 req-10404406-9b9f-4ca7-8e11-50ce0d7c11ea service nova] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Detach interface failed, port_id=3166f86b-c852-4335-9c18-b4c68f51d8ab, reason: Instance 1109aae0-62d2-4c55-8c68-f224f3de8b55 could not be found. {{(pid=62627) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1269.065157] env[62627]: INFO nova.compute.manager [-] [instance: 5b001b38-d121-4e03-910e-54647536faf5] Took 1.27 seconds to deallocate network for instance. [ 1269.073458] env[62627]: INFO nova.compute.manager [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 7e340c99-8d15-4696-98d5-a952f5d2aeee] Took 13.72 seconds to build instance. [ 1269.429813] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7e09c90-bf5e-49e9-824b-eef6e36b8623 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.439425] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13785e66-2d22-4594-b6f3-8d0638c38c60 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.442524] env[62627]: INFO nova.compute.manager [-] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Took 1.76 seconds to deallocate network for instance. 
[ 1269.470506] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 9adb2baf-315f-4122-a657-cb99757002ba] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1269.473747] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c8c4e8c-fd64-4f3e-bee3-6cb34e6f8c77 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.483234] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-953f40ec-85fd-4c99-bf27-3a7b7a7c0e62 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.498154] env[62627]: DEBUG nova.compute.provider_tree [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1269.576332] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8297deaa-8799-47ac-8265-2d0a53d43d76 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1269.576560] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e25e24c9-555b-4402-927b-d13072c83244 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "7e340c99-8d15-4696-98d5-a952f5d2aeee" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.235s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1269.974495] env[62627]: DEBUG oslo_concurrency.lockutils [None req-18aec2fb-1c67-4d5b-b824-9ec0bae4fc1d tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1269.974809] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 493e7ea0-b160-4cd0-ae0b-bc46e4d89217] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1270.000553] env[62627]: DEBUG nova.scheduler.client.report [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1270.478989] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 0de040e4-8896-4f02-9284-f68100254631] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1270.481634] env[62627]: DEBUG nova.compute.manager [req-166b98cb-a198-4c73-8f29-3458aaf9017c req-5d9630ee-f687-4b3b-89b0-b06f94e21d1a service nova] [instance: 7e340c99-8d15-4696-98d5-a952f5d2aeee] Received event network-changed-2cabec54-95a1-48db-b0ba-596ee7f4671e {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1270.481823] env[62627]: DEBUG nova.compute.manager [req-166b98cb-a198-4c73-8f29-3458aaf9017c req-5d9630ee-f687-4b3b-89b0-b06f94e21d1a service nova] [instance: 7e340c99-8d15-4696-98d5-a952f5d2aeee] Refreshing instance network info cache due to event network-changed-2cabec54-95a1-48db-b0ba-596ee7f4671e. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1270.482047] env[62627]: DEBUG oslo_concurrency.lockutils [req-166b98cb-a198-4c73-8f29-3458aaf9017c req-5d9630ee-f687-4b3b-89b0-b06f94e21d1a service nova] Acquiring lock "refresh_cache-7e340c99-8d15-4696-98d5-a952f5d2aeee" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1270.482203] env[62627]: DEBUG oslo_concurrency.lockutils [req-166b98cb-a198-4c73-8f29-3458aaf9017c req-5d9630ee-f687-4b3b-89b0-b06f94e21d1a service nova] Acquired lock "refresh_cache-7e340c99-8d15-4696-98d5-a952f5d2aeee" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1270.482364] env[62627]: DEBUG nova.network.neutron [req-166b98cb-a198-4c73-8f29-3458aaf9017c req-5d9630ee-f687-4b3b-89b0-b06f94e21d1a service nova] [instance: 7e340c99-8d15-4696-98d5-a952f5d2aeee] Refreshing network info cache for port 2cabec54-95a1-48db-b0ba-596ee7f4671e {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1270.504929] env[62627]: DEBUG oslo_concurrency.lockutils [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.201s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1270.505445] env[62627]: DEBUG nova.compute.manager [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Start building networks asynchronously for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1270.507983] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8297deaa-8799-47ac-8265-2d0a53d43d76 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.932s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1270.508232] env[62627]: DEBUG nova.objects.instance [None req-8297deaa-8799-47ac-8265-2d0a53d43d76 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Lazy-loading 'resources' on Instance uuid 5b001b38-d121-4e03-910e-54647536faf5 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1270.987260] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: e2e2fa28-b9f5-4c46-b94f-013f8d37c2ea] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1271.011647] env[62627]: DEBUG nova.compute.utils [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1271.018039] env[62627]: DEBUG nova.compute.manager [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Allocating IP information in the background. 
{{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1271.018039] env[62627]: DEBUG nova.network.neutron [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1271.069691] env[62627]: DEBUG nova.policy [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b8960d0a8ce744e8b0536074751e9d0d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f41b0c171180469e9092b8a8da697a78', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 1271.147878] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edb6c407-dee6-4820-8196-f4191c0f6d8c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.156600] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf0fda2f-38b6-422b-a66d-c098737bb7a6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.191283] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-096d96e4-4e1d-4c2a-9153-9f9da10b5314 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.199635] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5704e253-1dcb-487c-b557-d4a2c083cf61 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.216996] env[62627]: DEBUG nova.compute.provider_tree [None req-8297deaa-8799-47ac-8265-2d0a53d43d76 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1271.257299] env[62627]: DEBUG nova.network.neutron [req-166b98cb-a198-4c73-8f29-3458aaf9017c req-5d9630ee-f687-4b3b-89b0-b06f94e21d1a service nova] [instance: 7e340c99-8d15-4696-98d5-a952f5d2aeee] Updated VIF entry in instance network info cache for port 2cabec54-95a1-48db-b0ba-596ee7f4671e. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1271.257648] env[62627]: DEBUG nova.network.neutron [req-166b98cb-a198-4c73-8f29-3458aaf9017c req-5d9630ee-f687-4b3b-89b0-b06f94e21d1a service nova] [instance: 7e340c99-8d15-4696-98d5-a952f5d2aeee] Updating instance_info_cache with network_info: [{"id": "2cabec54-95a1-48db-b0ba-596ee7f4671e", "address": "fa:16:3e:cb:3f:26", "network": {"id": "1e37275e-31eb-4a79-8ff1-1b105d16fe3d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-59116622-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7a97f57bdf4a46db80460b1d5c9f9b9f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f499bc9-78da-46c1-9274-19edf26d31cb", "external-id": "nsx-vlan-transportzone-243", "segmentation_id": 243, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2cabec54-95", "ovs_interfaceid": "2cabec54-95a1-48db-b0ba-596ee7f4671e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1271.372531] env[62627]: DEBUG nova.network.neutron [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Successfully created port: f766cde0-2056-4245-861e-695b52bcc3b2 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1271.490234] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: f6357f01-d7cb-4477-be2d-98fc17d7a940] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1271.516965] env[62627]: DEBUG nova.compute.manager [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Start building block device mappings for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1271.720369] env[62627]: DEBUG nova.scheduler.client.report [None req-8297deaa-8799-47ac-8265-2d0a53d43d76 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1271.759977] env[62627]: DEBUG oslo_concurrency.lockutils [req-166b98cb-a198-4c73-8f29-3458aaf9017c req-5d9630ee-f687-4b3b-89b0-b06f94e21d1a service nova] Releasing lock "refresh_cache-7e340c99-8d15-4696-98d5-a952f5d2aeee" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1271.994823] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 84b8381c-bdd4-462e-aa98-0cc972f00882] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1272.225647] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8297deaa-8799-47ac-8265-2d0a53d43d76 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.717s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1272.228276] env[62627]: DEBUG oslo_concurrency.lockutils [None req-18aec2fb-1c67-4d5b-b824-9ec0bae4fc1d tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.254s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1272.228581] env[62627]: DEBUG nova.objects.instance [None req-18aec2fb-1c67-4d5b-b824-9ec0bae4fc1d tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lazy-loading 'resources' on Instance uuid 1109aae0-62d2-4c55-8c68-f224f3de8b55 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1272.253113] env[62627]: INFO nova.scheduler.client.report [None req-8297deaa-8799-47ac-8265-2d0a53d43d76 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Deleted allocations for instance 5b001b38-d121-4e03-910e-54647536faf5 [ 1272.507893] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: d2d4dd6b-d5c5-402b-b165-4c9ae1e36b93] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1272.529961] env[62627]: DEBUG nova.compute.manager [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Start spawning the 
instance on the hypervisor. {{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1272.558520] env[62627]: DEBUG nova.virt.hardware [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1272.558828] env[62627]: DEBUG nova.virt.hardware [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1272.558992] env[62627]: DEBUG nova.virt.hardware [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1272.559197] env[62627]: DEBUG nova.virt.hardware [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1272.559403] env[62627]: DEBUG nova.virt.hardware [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1272.559579] env[62627]: DEBUG nova.virt.hardware [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1272.559787] env[62627]: DEBUG nova.virt.hardware [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1272.559941] env[62627]: DEBUG nova.virt.hardware [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 1272.560118] env[62627]: DEBUG nova.virt.hardware [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1272.560279] env[62627]: DEBUG nova.virt.hardware [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1272.560447] env[62627]: DEBUG nova.virt.hardware [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1272.561325] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b762a4b1-9135-47e5-b0f8-f5dbfd3b075a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.570230] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-086298a7-0216-4605-b8df-747ab2e36435 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.764576] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8297deaa-8799-47ac-8265-2d0a53d43d76 tempest-InstanceActionsNegativeTestJSON-1216622255 tempest-InstanceActionsNegativeTestJSON-1216622255-project-member] Lock "5b001b38-d121-4e03-910e-54647536faf5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.597s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1272.767145] env[62627]: DEBUG nova.compute.manager [req-2dbbd693-aa70-42fd-9661-d6aa4c9acf04 req-e296a53c-7544-4390-a702-fe398d0a8850 service nova] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Received event network-vif-plugged-f766cde0-2056-4245-861e-695b52bcc3b2 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1272.767364] env[62627]: DEBUG oslo_concurrency.lockutils [req-2dbbd693-aa70-42fd-9661-d6aa4c9acf04 req-e296a53c-7544-4390-a702-fe398d0a8850 service nova] Acquiring lock "cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1272.767565] env[62627]: DEBUG oslo_concurrency.lockutils [req-2dbbd693-aa70-42fd-9661-d6aa4c9acf04 req-e296a53c-7544-4390-a702-fe398d0a8850 service nova] Lock "cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1272.767731] env[62627]: DEBUG oslo_concurrency.lockutils [req-2dbbd693-aa70-42fd-9661-d6aa4c9acf04 req-e296a53c-7544-4390-a702-fe398d0a8850 service nova] Lock "cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1272.767901] env[62627]: DEBUG nova.compute.manager [req-2dbbd693-aa70-42fd-9661-d6aa4c9acf04 req-e296a53c-7544-4390-a702-fe398d0a8850 service nova] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] No waiting events found dispatching network-vif-plugged-f766cde0-2056-4245-861e-695b52bcc3b2 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1272.768072] env[62627]: WARNING nova.compute.manager [req-2dbbd693-aa70-42fd-9661-d6aa4c9acf04 req-e296a53c-7544-4390-a702-fe398d0a8850 service nova] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Received unexpected event network-vif-plugged-f766cde0-2056-4245-861e-695b52bcc3b2 for instance with vm_state building and task_state spawning. [ 1272.840553] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe18c572-dcdd-46ba-8edb-205fff3bd451 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.849065] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85ad271d-4d2e-4f94-b025-e427de4afd10 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.881762] env[62627]: DEBUG nova.network.neutron [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Successfully updated port: f766cde0-2056-4245-861e-695b52bcc3b2 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1272.883375] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d631c404-2596-43de-9175-f96d9b248473 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.891275] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6bf2afb-4e1e-46e0-a7a3-ec7980134e3a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.905889] env[62627]: DEBUG nova.compute.provider_tree [None req-18aec2fb-1c67-4d5b-b824-9ec0bae4fc1d tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1273.011387] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: ae4ec050-5062-4e57-8090-f67f8c8013ff] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1273.387123] env[62627]: DEBUG oslo_concurrency.lockutils [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Acquiring lock "refresh_cache-cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1273.387275] env[62627]: DEBUG oslo_concurrency.lockutils [None req-763188f1-bd04-4312-b50d-3e4d137a7141 
tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Acquired lock "refresh_cache-cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1273.387431] env[62627]: DEBUG nova.network.neutron [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1273.408751] env[62627]: DEBUG nova.scheduler.client.report [None req-18aec2fb-1c67-4d5b-b824-9ec0bae4fc1d tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1273.515436] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 66882603-6ff6-49c2-9f42-3521ecaf98b2] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1273.913037] env[62627]: DEBUG oslo_concurrency.lockutils [None req-18aec2fb-1c67-4d5b-b824-9ec0bae4fc1d tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.685s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1273.928865] env[62627]: DEBUG nova.network.neutron [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Instance cache missing network info. 
{{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1273.939634] env[62627]: INFO nova.scheduler.client.report [None req-18aec2fb-1c67-4d5b-b824-9ec0bae4fc1d tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Deleted allocations for instance 1109aae0-62d2-4c55-8c68-f224f3de8b55 [ 1274.019063] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 457e5aa6-d229-4f48-8817-e918ec4bd0f9] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1274.167985] env[62627]: DEBUG nova.network.neutron [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Updating instance_info_cache with network_info: [{"id": "f766cde0-2056-4245-861e-695b52bcc3b2", "address": "fa:16:3e:34:82:51", "network": {"id": "76eae841-5dda-4384-92ba-55247808e11f", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1004379592-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f41b0c171180469e9092b8a8da697a78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf766cde0-20", "ovs_interfaceid": "f766cde0-2056-4245-861e-695b52bcc3b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1274.450377] env[62627]: DEBUG oslo_concurrency.lockutils [None req-18aec2fb-1c67-4d5b-b824-9ec0bae4fc1d tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lock "1109aae0-62d2-4c55-8c68-f224f3de8b55" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.395s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1274.522168] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 892fc84b-02a9-4fa0-81b5-80326a060c64] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1274.670984] env[62627]: DEBUG oslo_concurrency.lockutils [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Releasing lock "refresh_cache-cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1274.671333] env[62627]: DEBUG nova.compute.manager [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 
tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Instance network_info: |[{"id": "f766cde0-2056-4245-861e-695b52bcc3b2", "address": "fa:16:3e:34:82:51", "network": {"id": "76eae841-5dda-4384-92ba-55247808e11f", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1004379592-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f41b0c171180469e9092b8a8da697a78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf766cde0-20", "ovs_interfaceid": "f766cde0-2056-4245-861e-695b52bcc3b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1274.671756] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:34:82:51', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dc7aa55d-223a-4157-9137-88dc492f2db2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f766cde0-2056-4245-861e-695b52bcc3b2', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1274.684663] env[62627]: DEBUG oslo.service.loopingcall [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1274.684886] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1274.685121] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a1ec303f-0c29-47d1-a6ba-55c07dd97679 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.709022] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1274.709022] env[62627]: value = "task-2195328" [ 1274.709022] env[62627]: _type = "Task" [ 1274.709022] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1274.718236] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195328, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1274.796368] env[62627]: DEBUG nova.compute.manager [req-a1b35111-726f-4971-8e58-7ae50826a62d req-a3c92454-75c4-41b8-9e29-cedf05ef3836 service nova] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Received event network-changed-f766cde0-2056-4245-861e-695b52bcc3b2 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1274.796606] env[62627]: DEBUG nova.compute.manager [req-a1b35111-726f-4971-8e58-7ae50826a62d req-a3c92454-75c4-41b8-9e29-cedf05ef3836 service nova] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Refreshing instance network info cache due to event network-changed-f766cde0-2056-4245-861e-695b52bcc3b2. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1274.796863] env[62627]: DEBUG oslo_concurrency.lockutils [req-a1b35111-726f-4971-8e58-7ae50826a62d req-a3c92454-75c4-41b8-9e29-cedf05ef3836 service nova] Acquiring lock "refresh_cache-cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1274.797089] env[62627]: DEBUG oslo_concurrency.lockutils [req-a1b35111-726f-4971-8e58-7ae50826a62d req-a3c92454-75c4-41b8-9e29-cedf05ef3836 service nova] Acquired lock "refresh_cache-cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1274.797323] env[62627]: DEBUG nova.network.neutron [req-a1b35111-726f-4971-8e58-7ae50826a62d req-a3c92454-75c4-41b8-9e29-cedf05ef3836 service nova] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Refreshing network info cache for port f766cde0-2056-4245-861e-695b52bcc3b2 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1274.902293] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6702ff64-af57-49fb-9a9d-a0270db33b30 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquiring lock "c7e3246d-ffe7-4d81-a2ec-c5878649ed6b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1274.902625] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6702ff64-af57-49fb-9a9d-a0270db33b30 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lock "c7e3246d-ffe7-4d81-a2ec-c5878649ed6b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1274.902907] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6702ff64-af57-49fb-9a9d-a0270db33b30 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquiring lock "c7e3246d-ffe7-4d81-a2ec-c5878649ed6b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1274.903141] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6702ff64-af57-49fb-9a9d-a0270db33b30 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lock "c7e3246d-ffe7-4d81-a2ec-c5878649ed6b-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1274.903328] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6702ff64-af57-49fb-9a9d-a0270db33b30 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lock "c7e3246d-ffe7-4d81-a2ec-c5878649ed6b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1274.905447] env[62627]: INFO nova.compute.manager [None req-6702ff64-af57-49fb-9a9d-a0270db33b30 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Terminating instance [ 1275.026716] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 1129349d-65db-4e1d-bb08-cea793adf996] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1275.217666] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195328, 'name': CreateVM_Task} progress is 25%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1275.410201] env[62627]: DEBUG nova.compute.manager [None req-6702ff64-af57-49fb-9a9d-a0270db33b30 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Start destroying the instance on the hypervisor. {{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1275.410438] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-6702ff64-af57-49fb-9a9d-a0270db33b30 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1275.411342] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3c84c07-2625-44bb-aaa9-c71a766d01a7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.421342] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-6702ff64-af57-49fb-9a9d-a0270db33b30 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1275.422023] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bd06c7a9-3d36-479e-91d3-dd3da9a957fa {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.428566] env[62627]: DEBUG oslo_vmware.api [None req-6702ff64-af57-49fb-9a9d-a0270db33b30 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Waiting for the task: (returnval){ [ 1275.428566] env[62627]: value = "task-2195329" [ 1275.428566] env[62627]: _type = "Task" [ 1275.428566] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1275.437330] env[62627]: DEBUG oslo_vmware.api [None req-6702ff64-af57-49fb-9a9d-a0270db33b30 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195329, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1275.530218] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: aa181094-cb78-44e0-8d7a-1d677e0af1fe] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1275.721030] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195328, 'name': CreateVM_Task, 'duration_secs': 1.003266} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1275.721030] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1275.721030] env[62627]: DEBUG oslo_concurrency.lockutils [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1275.721030] env[62627]: DEBUG oslo_concurrency.lockutils [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1275.721030] env[62627]: DEBUG oslo_concurrency.lockutils [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1275.721030] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ddbd0805-c5e0-4dac-b715-d4aa4f95e0eb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.725133] env[62627]: DEBUG oslo_vmware.api [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Waiting for the task: (returnval){ [ 1275.725133] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52de136a-5656-2049-fbdd-9649bcf7a759" [ 1275.725133] env[62627]: _type = "Task" [ 1275.725133] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1275.736052] env[62627]: DEBUG oslo_vmware.api [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52de136a-5656-2049-fbdd-9649bcf7a759, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1275.822176] env[62627]: DEBUG nova.network.neutron [req-a1b35111-726f-4971-8e58-7ae50826a62d req-a3c92454-75c4-41b8-9e29-cedf05ef3836 service nova] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Updated VIF entry in instance network info cache for port f766cde0-2056-4245-861e-695b52bcc3b2. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1275.822570] env[62627]: DEBUG nova.network.neutron [req-a1b35111-726f-4971-8e58-7ae50826a62d req-a3c92454-75c4-41b8-9e29-cedf05ef3836 service nova] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Updating instance_info_cache with network_info: [{"id": "f766cde0-2056-4245-861e-695b52bcc3b2", "address": "fa:16:3e:34:82:51", "network": {"id": "76eae841-5dda-4384-92ba-55247808e11f", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1004379592-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f41b0c171180469e9092b8a8da697a78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf766cde0-20", "ovs_interfaceid": "f766cde0-2056-4245-861e-695b52bcc3b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1275.939459] env[62627]: DEBUG oslo_vmware.api [None req-6702ff64-af57-49fb-9a9d-a0270db33b30 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195329, 'name': PowerOffVM_Task, 'duration_secs': 0.198347} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1275.939794] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-6702ff64-af57-49fb-9a9d-a0270db33b30 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1275.939997] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-6702ff64-af57-49fb-9a9d-a0270db33b30 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1275.940267] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-959d4098-648d-444f-8478-483e30626d54 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.007765] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-6702ff64-af57-49fb-9a9d-a0270db33b30 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1276.007765] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-6702ff64-af57-49fb-9a9d-a0270db33b30 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1276.007765] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-6702ff64-af57-49fb-9a9d-a0270db33b30 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Deleting the datastore file [datastore2] c7e3246d-ffe7-4d81-a2ec-c5878649ed6b {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1276.007765] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-32229e25-08a3-41d6-a521-c2765308bb84 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.014295] env[62627]: DEBUG oslo_vmware.api [None req-6702ff64-af57-49fb-9a9d-a0270db33b30 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Waiting for the task: (returnval){ [ 1276.014295] env[62627]: value = "task-2195332" [ 1276.014295] env[62627]: _type = "Task" [ 1276.014295] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1276.022806] env[62627]: DEBUG oslo_vmware.api [None req-6702ff64-af57-49fb-9a9d-a0270db33b30 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195332, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.033383] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 241311b0-ba81-41d1-a9b6-cd40d04dbe3c] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1276.237861] env[62627]: DEBUG oslo_vmware.api [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52de136a-5656-2049-fbdd-9649bcf7a759, 'name': SearchDatastore_Task, 'duration_secs': 0.011597} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1276.238187] env[62627]: DEBUG oslo_concurrency.lockutils [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1276.238465] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1276.238706] env[62627]: DEBUG oslo_concurrency.lockutils [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1276.238856] env[62627]: DEBUG oslo_concurrency.lockutils [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1276.239048] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1276.239311] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-153ab843-7d78-483f-808a-e5f5c4e5e439 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.249299] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1276.249531] 
env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1276.250378] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aac10ed0-79b6-4bf7-8155-1b4e08598003 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.256481] env[62627]: DEBUG oslo_vmware.api [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Waiting for the task: (returnval){ [ 1276.256481] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52b2ed96-aee6-1abd-996c-90f533c4f1f5" [ 1276.256481] env[62627]: _type = "Task" [ 1276.256481] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1276.264618] env[62627]: DEBUG oslo_vmware.api [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52b2ed96-aee6-1abd-996c-90f533c4f1f5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.325447] env[62627]: DEBUG oslo_concurrency.lockutils [req-a1b35111-726f-4971-8e58-7ae50826a62d req-a3c92454-75c4-41b8-9e29-cedf05ef3836 service nova] Releasing lock "refresh_cache-cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1276.524039] env[62627]: DEBUG oslo_vmware.api [None req-6702ff64-af57-49fb-9a9d-a0270db33b30 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Task: {'id': task-2195332, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.169769} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1276.524325] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-6702ff64-af57-49fb-9a9d-a0270db33b30 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1276.524523] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-6702ff64-af57-49fb-9a9d-a0270db33b30 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1276.524702] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-6702ff64-af57-49fb-9a9d-a0270db33b30 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1276.524874] env[62627]: INFO nova.compute.manager [None req-6702ff64-af57-49fb-9a9d-a0270db33b30 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1276.525129] env[62627]: DEBUG oslo.service.loopingcall [None req-6702ff64-af57-49fb-9a9d-a0270db33b30 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1276.525324] env[62627]: DEBUG nova.compute.manager [-] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1276.525421] env[62627]: DEBUG nova.network.neutron [-] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1276.536757] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: b1ba58c1-340f-486a-8f47-86fbc7b27485] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1276.767640] env[62627]: DEBUG oslo_vmware.api [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52b2ed96-aee6-1abd-996c-90f533c4f1f5, 'name': SearchDatastore_Task, 'duration_secs': 0.010106} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1276.768515] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-97ade836-1c75-4411-a097-da845a181464 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.774442] env[62627]: DEBUG oslo_vmware.api [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Waiting for the task: (returnval){ [ 1276.774442] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52378a7e-9aaf-6c38-82c9-6601aa605652" [ 1276.774442] env[62627]: _type = "Task" [ 1276.774442] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1276.783444] env[62627]: DEBUG oslo_vmware.api [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52378a7e-9aaf-6c38-82c9-6601aa605652, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.923305] env[62627]: DEBUG nova.compute.manager [req-c1ad924f-8bb8-4d22-8384-eae19d273ef6 req-185727fd-7fe3-4c2c-b2f6-e464f452aa04 service nova] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Received event network-vif-deleted-58815871-fdac-4406-a61e-4d7fca125122 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1276.923521] env[62627]: INFO nova.compute.manager [req-c1ad924f-8bb8-4d22-8384-eae19d273ef6 req-185727fd-7fe3-4c2c-b2f6-e464f452aa04 service nova] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Neutron deleted interface 58815871-fdac-4406-a61e-4d7fca125122; detaching it from the instance and deleting it from the info cache [ 1276.923672] env[62627]: DEBUG nova.network.neutron [req-c1ad924f-8bb8-4d22-8384-eae19d273ef6 req-185727fd-7fe3-4c2c-b2f6-e464f452aa04 service nova] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1277.040119] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: a397bce6-30c7-485d-81ab-88a8e021996c] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1277.285401] env[62627]: DEBUG oslo_vmware.api [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52378a7e-9aaf-6c38-82c9-6601aa605652, 'name': SearchDatastore_Task, 'duration_secs': 0.028662} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1277.285687] env[62627]: DEBUG oslo_concurrency.lockutils [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1277.285944] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e/cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1277.286294] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a9ef0288-b793-45b7-8c9c-91682fdd6330 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.294680] env[62627]: DEBUG oslo_vmware.api [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Waiting for the task: (returnval){ [ 1277.294680] env[62627]: value = "task-2195333" [ 1277.294680] env[62627]: _type = "Task" [ 1277.294680] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1277.303147] env[62627]: DEBUG oslo_vmware.api [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195333, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1277.408828] env[62627]: DEBUG nova.network.neutron [-] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1277.428751] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e23b6100-d43d-493c-b78d-a23fab62dacf {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.439637] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66e0dea7-2126-441c-bc95-272a83c593cc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.474958] env[62627]: DEBUG nova.compute.manager [req-c1ad924f-8bb8-4d22-8384-eae19d273ef6 req-185727fd-7fe3-4c2c-b2f6-e464f452aa04 service nova] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Detach interface failed, port_id=58815871-fdac-4406-a61e-4d7fca125122, reason: Instance c7e3246d-ffe7-4d81-a2ec-c5878649ed6b could not be found. 
{{(pid=62627) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1277.543618] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: c1fd6afb-9332-446d-a441-5942b60edc94] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1277.808017] env[62627]: DEBUG oslo_vmware.api [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195333, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.490017} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1277.808803] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e/cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1277.808803] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1277.808971] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e8ccd6b5-f36e-4c93-8d92-5f9010fdaa8c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.818172] env[62627]: DEBUG oslo_vmware.api [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Waiting for the task: (returnval){ [ 1277.818172] env[62627]: value = "task-2195334" [ 1277.818172] env[62627]: _type = "Task" [ 1277.818172] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1277.827116] env[62627]: DEBUG oslo_vmware.api [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195334, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1277.912983] env[62627]: INFO nova.compute.manager [-] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Took 1.39 seconds to deallocate network for instance. 
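
Note: the SearchDatastore_Task / DeleteDatastoreFile_Task entries above all follow the same shape: oslo.vmware submits a vCenter task, then polls it, emitting "progress is N%" lines until it logs "completed successfully" with a duration. A minimal stand-alone sketch of that polling pattern is below; TaskInfo, fetch_task_info and POLL_INTERVAL are hypothetical stand-ins for illustration, not the oslo.vmware implementation.

    # Hedged sketch of a vCenter-style task polling loop; the names here
    # (TaskInfo, fetch_task_info, POLL_INTERVAL) are illustrative, not oslo.vmware's API.
    import time
    from dataclasses import dataclass

    POLL_INTERVAL = 0.5  # assumed polling interval in seconds

    @dataclass
    class TaskInfo:
        state: str          # "queued", "running", "success" or "error"
        progress: int = 0   # 0..100
        error: str = ""

    def wait_for_task(task_id, fetch_task_info):
        # Poll until the task reaches a terminal state, printing lines analogous to
        # the "progress is 0%" / "completed successfully" entries in this log.
        start = time.monotonic()
        while True:
            info = fetch_task_info(task_id)
            if info.state == "success":
                print(f"Task {task_id} completed successfully "
                      f"(duration_secs: {time.monotonic() - start:.6f}).")
                return info
            if info.state == "error":
                raise RuntimeError(f"Task {task_id} failed: {info.error}")
            print(f"Task {task_id} progress is {info.progress}%.")
            time.sleep(POLL_INTERVAL)
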
[ 1278.047547] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 7366e4b0-bdba-435d-844d-8e81c4464b31] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1278.329357] env[62627]: DEBUG oslo_vmware.api [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195334, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065935} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1278.329646] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1278.330473] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf29961a-5454-4da4-86c6-8062ede02ef5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.357137] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Reconfiguring VM instance instance-0000006b to attach disk [datastore2] cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e/cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1278.357811] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7ada4aad-8258-43aa-be56-051b66c367f2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.378918] env[62627]: DEBUG oslo_vmware.api [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Waiting for the task: (returnval){ [ 1278.378918] env[62627]: value = "task-2195336" [ 1278.378918] env[62627]: _type = "Task" [ 1278.378918] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1278.388369] env[62627]: DEBUG oslo_vmware.api [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195336, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.421635] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6702ff64-af57-49fb-9a9d-a0270db33b30 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1278.421908] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6702ff64-af57-49fb-9a9d-a0270db33b30 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1278.422146] env[62627]: DEBUG nova.objects.instance [None req-6702ff64-af57-49fb-9a9d-a0270db33b30 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lazy-loading 'resources' on Instance uuid c7e3246d-ffe7-4d81-a2ec-c5878649ed6b {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1278.554467] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: ab999825-4310-4fd8-81f0-6bb5dde4175b] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1278.888950] env[62627]: DEBUG oslo_vmware.api [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195336, 'name': ReconfigVM_Task, 'duration_secs': 0.380101} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1278.889235] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Reconfigured VM instance instance-0000006b to attach disk [datastore2] cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e/cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1278.889859] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d37d01ad-c0f5-4d2c-bc64-ebcb890c6d75 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.896573] env[62627]: DEBUG oslo_vmware.api [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Waiting for the task: (returnval){ [ 1278.896573] env[62627]: value = "task-2195337" [ 1278.896573] env[62627]: _type = "Task" [ 1278.896573] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1278.906159] env[62627]: DEBUG oslo_vmware.api [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195337, 'name': Rename_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1279.051669] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquiring lock "00bb8c04-3c0b-48ff-8127-13d3e15cb86c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1279.052066] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "00bb8c04-3c0b-48ff-8127-13d3e15cb86c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1279.057985] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-719496ef-fcfa-4d41-add4-42c8dfaa08f1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.069082] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2d1c548-1a3b-4eb1-81f0-175230645f65 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.105175] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-891d18b1-af3e-46be-b8e1-6b96121765f0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.113593] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fc869e2-da04-40ae-985f-47abee6e9656 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.127422] env[62627]: DEBUG nova.compute.provider_tree [None req-6702ff64-af57-49fb-9a9d-a0270db33b30 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1279.406540] env[62627]: DEBUG oslo_vmware.api [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195337, 'name': Rename_Task, 'duration_secs': 0.20127} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1279.406815] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1279.407071] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c6b4a59a-43fa-41df-a93f-71649f5ea232 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.413917] env[62627]: DEBUG oslo_vmware.api [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Waiting for the task: (returnval){ [ 1279.413917] env[62627]: value = "task-2195338" [ 1279.413917] env[62627]: _type = "Task" [ 1279.413917] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1279.423139] env[62627]: DEBUG oslo_vmware.api [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195338, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1279.554129] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1279.554550] env[62627]: DEBUG nova.compute.manager [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 00bb8c04-3c0b-48ff-8127-13d3e15cb86c] Starting instance... 
{{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1279.557195] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1279.630141] env[62627]: DEBUG nova.scheduler.client.report [None req-6702ff64-af57-49fb-9a9d-a0270db33b30 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1279.926047] env[62627]: DEBUG oslo_vmware.api [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195338, 'name': PowerOnVM_Task, 'duration_secs': 0.44368} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1279.926047] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1279.926047] env[62627]: INFO nova.compute.manager [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Took 7.40 seconds to spawn the instance on the hypervisor. 
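
Note: read end to end, the entries for instance cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e trace a fixed spawn sequence: locate the cached image VMDK, copy it to the instance directory, extend the root disk to 1048576 (KB, per the flavor's 1 GB root), reconfigure the VM to attach the disk, rename, then power on. The sketch below restates that sequence with values taken from the log; run_task is a hypothetical helper standing in for "submit task and wait", not Nova's vmwareapi code.

    # Hedged restatement of the spawn sequence visible above for instance
    # cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e; run_task is a hypothetical placeholder.
    def run_task(name, **kwargs):
        print(f"running {name} with {kwargs}")   # stand-in for submit + wait_for_task

    def spawn_from_cached_image(image_id, instance_uuid, datastore="datastore2"):
        cache = f"[{datastore}] devstack-image-cache_base/{image_id}/{image_id}.vmdk"
        root = f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"
        run_task("SearchDatastore_Task", path=cache)        # confirm the cached image exists
        run_task("CopyVirtualDisk_Task", source=cache, dest=root)
        run_task("ExtendVirtualDisk_Task", disk=root, new_size_kb=1048576)
        run_task("ReconfigVM_Task", attach_disk=root)       # attach the root disk to the VM
        run_task("Rename_Task", new_name=instance_uuid)
        run_task("PowerOnVM_Task")

    spawn_from_cached_image("d5497cf1-16e8-478a-a834-e9289b1751b7",
                            "cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e")
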
[ 1279.926239] env[62627]: DEBUG nova.compute.manager [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1279.927103] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ecb2562-5e06-440c-9976-9ab2ef1c9bf5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.066819] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1280.067163] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Starting heal instance info cache {{(pid=62627) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 1280.081363] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1280.135245] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6702ff64-af57-49fb-9a9d-a0270db33b30 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.713s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1280.137926] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.056s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1280.139111] env[62627]: INFO nova.compute.claims [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 00bb8c04-3c0b-48ff-8127-13d3e15cb86c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1280.156220] env[62627]: INFO nova.scheduler.client.report [None req-6702ff64-af57-49fb-9a9d-a0270db33b30 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Deleted allocations for instance c7e3246d-ffe7-4d81-a2ec-c5878649ed6b [ 1280.445349] env[62627]: INFO nova.compute.manager [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Took 12.16 seconds to build instance. 
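
Note: the instance_info_cache payloads recorded for port f766cde0-2056-4245-861e-695b52bcc3b2 (one earlier in this log with an empty floating_ips list, one later with 10.180.180.248 attached) share the same nested structure. A small self-contained sketch of pulling the addresses out of such an entry follows; the dict literal is trimmed to the fields quoted in this log and is not Nova's network model code.

    # Hedged sketch: extracting addresses from one instance_info_cache entry as it
    # appears in this log (trimmed to the quoted fields).
    vif = {
        "id": "f766cde0-2056-4245-861e-695b52bcc3b2",
        "address": "fa:16:3e:34:82:51",
        "network": {
            "id": "76eae841-5dda-4384-92ba-55247808e11f",
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "ips": [{
                    "address": "192.168.128.11",
                    "type": "fixed",
                    "floating_ips": [{"address": "10.180.180.248", "type": "floating"}],
                }],
            }],
        },
        "devname": "tapf766cde0-20",
    }

    def summarize_vif(vif):
        # Collect the fixed and floating addresses carried by one VIF entry.
        fixed, floating = [], []
        for subnet in vif["network"]["subnets"]:
            for ip in subnet["ips"]:
                fixed.append(ip["address"])
                floating.extend(f["address"] for f in ip.get("floating_ips", []))
        return {"mac": vif["address"], "fixed": fixed, "floating": floating}

    print(summarize_vif(vif))
    # -> {'mac': 'fa:16:3e:34:82:51', 'fixed': ['192.168.128.11'], 'floating': ['10.180.180.248']}
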
[ 1280.663339] env[62627]: DEBUG oslo_concurrency.lockutils [None req-6702ff64-af57-49fb-9a9d-a0270db33b30 tempest-AttachInterfacesTestJSON-578257446 tempest-AttachInterfacesTestJSON-578257446-project-member] Lock "c7e3246d-ffe7-4d81-a2ec-c5878649ed6b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.761s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1280.874728] env[62627]: DEBUG nova.compute.manager [req-c86bbde1-ea2f-4485-8385-c14c2b366ea4 req-1a6586d4-48f2-4d6f-abcf-35cc9feba0b0 service nova] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Received event network-changed-f766cde0-2056-4245-861e-695b52bcc3b2 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1280.874985] env[62627]: DEBUG nova.compute.manager [req-c86bbde1-ea2f-4485-8385-c14c2b366ea4 req-1a6586d4-48f2-4d6f-abcf-35cc9feba0b0 service nova] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Refreshing instance network info cache due to event network-changed-f766cde0-2056-4245-861e-695b52bcc3b2. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1280.875294] env[62627]: DEBUG oslo_concurrency.lockutils [req-c86bbde1-ea2f-4485-8385-c14c2b366ea4 req-1a6586d4-48f2-4d6f-abcf-35cc9feba0b0 service nova] Acquiring lock "refresh_cache-cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1280.875522] env[62627]: DEBUG oslo_concurrency.lockutils [req-c86bbde1-ea2f-4485-8385-c14c2b366ea4 req-1a6586d4-48f2-4d6f-abcf-35cc9feba0b0 service nova] Acquired lock "refresh_cache-cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1280.875754] env[62627]: DEBUG nova.network.neutron [req-c86bbde1-ea2f-4485-8385-c14c2b366ea4 req-1a6586d4-48f2-4d6f-abcf-35cc9feba0b0 service nova] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Refreshing network info cache for port f766cde0-2056-4245-861e-695b52bcc3b2 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1280.948141] env[62627]: DEBUG oslo_concurrency.lockutils [None req-763188f1-bd04-4312-b50d-3e4d137a7141 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lock "cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.674s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1281.246267] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2963ab8-d48b-44e4-96b4-cbed4dbaa63b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.254644] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beac9370-cc2d-4978-ae33-64718b03ca87 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.285671] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28b89a41-4a5a-4c47-8d66-6422121651c1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.293574] 
env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a693d92a-cc43-4eb5-bd16-ef54f0621377 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.308812] env[62627]: DEBUG nova.compute.provider_tree [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1281.616265] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Acquiring lock "refresh_cache-e9d33611-e373-4303-937d-b43d893e51f5" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1281.616405] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Acquired lock "refresh_cache-e9d33611-e373-4303-937d-b43d893e51f5" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1281.616761] env[62627]: DEBUG nova.network.neutron [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Forcefully refreshing network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1281.626178] env[62627]: DEBUG nova.network.neutron [req-c86bbde1-ea2f-4485-8385-c14c2b366ea4 req-1a6586d4-48f2-4d6f-abcf-35cc9feba0b0 service nova] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Updated VIF entry in instance network info cache for port f766cde0-2056-4245-861e-695b52bcc3b2. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1281.626911] env[62627]: DEBUG nova.network.neutron [req-c86bbde1-ea2f-4485-8385-c14c2b366ea4 req-1a6586d4-48f2-4d6f-abcf-35cc9feba0b0 service nova] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Updating instance_info_cache with network_info: [{"id": "f766cde0-2056-4245-861e-695b52bcc3b2", "address": "fa:16:3e:34:82:51", "network": {"id": "76eae841-5dda-4384-92ba-55247808e11f", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1004379592-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f41b0c171180469e9092b8a8da697a78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf766cde0-20", "ovs_interfaceid": "f766cde0-2056-4245-861e-695b52bcc3b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1281.811872] env[62627]: DEBUG nova.scheduler.client.report [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1282.128992] env[62627]: DEBUG oslo_concurrency.lockutils [req-c86bbde1-ea2f-4485-8385-c14c2b366ea4 req-1a6586d4-48f2-4d6f-abcf-35cc9feba0b0 service nova] Releasing lock "refresh_cache-cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1282.317445] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.180s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1282.318064] env[62627]: DEBUG nova.compute.manager [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 00bb8c04-3c0b-48ff-8127-13d3e15cb86c] Start building networks asynchronously for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1282.712786] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a78283f6-8371-4e8c-bd2a-c91671b6f263 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquiring lock "1593ca50-bc97-464a-86ca-17995db15758" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1282.713121] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a78283f6-8371-4e8c-bd2a-c91671b6f263 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lock "1593ca50-bc97-464a-86ca-17995db15758" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1282.825021] env[62627]: DEBUG nova.compute.utils [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1282.825021] env[62627]: DEBUG nova.compute.manager [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 00bb8c04-3c0b-48ff-8127-13d3e15cb86c] Allocating IP information in the background. {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1282.825021] env[62627]: DEBUG nova.network.neutron [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 00bb8c04-3c0b-48ff-8127-13d3e15cb86c] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1282.863559] env[62627]: DEBUG nova.network.neutron [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Updating instance_info_cache with network_info: [{"id": "5825a537-f659-4e7e-907e-e22208e06c0c", "address": "fa:16:3e:a9:0b:9d", "network": {"id": "da26ed97-a6a7-47f1-b6c0-d1b2f45ad202", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-265963183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.167", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97c11d791dcf4f94a8e8bda7a6f014cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6eaa481-1f92-4851-b98e-09ed0daad7cb", "external-id": "nsx-vlan-transportzone-636", "segmentation_id": 636, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5825a537-f6", "ovs_interfaceid": "5825a537-f659-4e7e-907e-e22208e06c0c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 
1282.886536] env[62627]: DEBUG nova.policy [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd847ab3ec20743d0997defea8008448e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '710e7463994b4ec08bcc6d479cd1138b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 1283.217592] env[62627]: DEBUG nova.compute.utils [None req-a78283f6-8371-4e8c-bd2a-c91671b6f263 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1283.242788] env[62627]: DEBUG nova.network.neutron [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 00bb8c04-3c0b-48ff-8127-13d3e15cb86c] Successfully created port: d1a62857-50ae-4f10-8206-bfd58d9da6e0 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1283.330686] env[62627]: DEBUG nova.compute.manager [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 00bb8c04-3c0b-48ff-8127-13d3e15cb86c] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1283.368116] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Releasing lock "refresh_cache-e9d33611-e373-4303-937d-b43d893e51f5" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1283.368382] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Updated the network info_cache for instance {{(pid=62627) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10516}} [ 1283.368565] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1283.368732] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1283.368893] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1283.369054] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 
1283.369223] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1283.369398] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._sync_power_states {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1283.721387] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a78283f6-8371-4e8c-bd2a-c91671b6f263 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lock "1593ca50-bc97-464a-86ca-17995db15758" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1283.874453] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Getting list of instances from cluster (obj){ [ 1283.874453] env[62627]: value = "domain-c8" [ 1283.874453] env[62627]: _type = "ClusterComputeResource" [ 1283.874453] env[62627]: } {{(pid=62627) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1283.875541] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5ae4960-6c5e-4a9c-80ab-511d71c88488 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.891651] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Got total of 6 instances {{(pid=62627) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1283.891845] env[62627]: WARNING nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] While synchronizing instance power states, found 7 instances in the database and 6 instances on the hypervisor. 
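
Note: the _sync_power_states warning above comes from comparing the instances known to the Nova database against what the cluster reports (here 7 vs 6, because 00bb8c04-3c0b-48ff-8127-13d3e15cb86c is still building), then triggering a per-UUID sync under a per-instance lock. A stdlib-only sketch of that comparison is below; get_db_instances, get_hypervisor_uuids and query_and_sync are hypothetical stand-ins, not the ComputeManager code.

    # Hedged sketch of the count comparison and per-UUID sync behind the warning above.
    import threading

    _instance_locks = {}

    def _lock_for(uuid):
        # One lock per instance UUID, mirroring the per-UUID lock acquisitions in the log.
        return _instance_locks.setdefault(uuid, threading.Lock())

    def sync_power_states(get_db_instances, get_hypervisor_uuids, query_and_sync):
        db_uuids = list(get_db_instances())
        hyper_uuids = set(get_hypervisor_uuids())
        if len(db_uuids) != len(hyper_uuids):
            print(f"WARNING: found {len(db_uuids)} instances in the database and "
                  f"{len(hyper_uuids)} instances on the hypervisor.")
        for uuid in db_uuids:
            print(f"Triggering sync for uuid {uuid}")
            with _lock_for(uuid):
                query_and_sync(uuid, uuid in hyper_uuids)
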
[ 1283.891986] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Triggering sync for uuid 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a {{(pid=62627) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10855}} [ 1283.892431] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Triggering sync for uuid e9d33611-e373-4303-937d-b43d893e51f5 {{(pid=62627) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10855}} [ 1283.892431] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Triggering sync for uuid b68042fd-4955-41e4-b14f-1f8268ace377 {{(pid=62627) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10855}} [ 1283.892543] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Triggering sync for uuid 1593ca50-bc97-464a-86ca-17995db15758 {{(pid=62627) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10855}} [ 1283.892624] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Triggering sync for uuid 7e340c99-8d15-4696-98d5-a952f5d2aeee {{(pid=62627) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10855}} [ 1283.892775] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Triggering sync for uuid cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e {{(pid=62627) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10855}} [ 1283.892921] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Triggering sync for uuid 00bb8c04-3c0b-48ff-8127-13d3e15cb86c {{(pid=62627) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10855}} [ 1283.893268] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Acquiring lock "6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1283.893479] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1283.893738] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Acquiring lock "e9d33611-e373-4303-937d-b43d893e51f5" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1283.893920] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "e9d33611-e373-4303-937d-b43d893e51f5" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1283.894160] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Acquiring lock "b68042fd-4955-41e4-b14f-1f8268ace377" by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1283.894338] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "b68042fd-4955-41e4-b14f-1f8268ace377" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1283.894720] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Acquiring lock "1593ca50-bc97-464a-86ca-17995db15758" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1283.894720] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "1593ca50-bc97-464a-86ca-17995db15758" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1283.894940] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Acquiring lock "7e340c99-8d15-4696-98d5-a952f5d2aeee" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1283.895126] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "7e340c99-8d15-4696-98d5-a952f5d2aeee" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1283.895416] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Acquiring lock "cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1283.895515] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1283.895732] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Acquiring lock "00bb8c04-3c0b-48ff-8127-13d3e15cb86c" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1283.895961] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62627) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1283.896056] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62627) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 1283.896819] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38d37ac3-bb0a-4f80-88ee-9418581c64bd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.899922] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efeee9fe-7aa7-45cb-bb09-1afdb5eca5c4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.902610] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf475b5f-4290-4394-b481-31ba95ac866b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.905280] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b40329a-2b10-42af-8283-29e8de7a5ed5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.907843] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83cfa2a8-a1f0-47ac-a852-c2d9bf95b7a8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.910629] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d87f751-f096-4c91-b6fd-9f569483d913 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.913503] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1284.339635] env[62627]: DEBUG nova.compute.manager [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 00bb8c04-3c0b-48ff-8127-13d3e15cb86c] Start spawning the instance on the hypervisor. 
{{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1284.365517] env[62627]: DEBUG nova.virt.hardware [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1284.365765] env[62627]: DEBUG nova.virt.hardware [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1284.365920] env[62627]: DEBUG nova.virt.hardware [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1284.366114] env[62627]: DEBUG nova.virt.hardware [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1284.366263] env[62627]: DEBUG nova.virt.hardware [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1284.366409] env[62627]: DEBUG nova.virt.hardware [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1284.366658] env[62627]: DEBUG nova.virt.hardware [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1284.366826] env[62627]: DEBUG nova.virt.hardware [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1284.366995] env[62627]: DEBUG nova.virt.hardware [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 
tempest-ServersTestJSON-494939627-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1284.367180] env[62627]: DEBUG nova.virt.hardware [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1284.367351] env[62627]: DEBUG nova.virt.hardware [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1284.368223] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60d3a7df-0ac0-43b9-98db-f34dfb62dd02 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.376045] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7a51661-672b-4281-aecc-6340f626d8ef {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.417417] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1284.417648] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1284.417814] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1284.417968] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62627) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1284.418896] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-634e81bf-42dd-4d70-a1cf-b0590d9a9187 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.427601] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9140c13-cc9f-4a76-8320-056c864bf6f9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.433747] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "7e340c99-8d15-4696-98d5-a952f5d2aeee" "released" by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.539s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1284.434084] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.541s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1284.441766] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "e9d33611-e373-4303-937d-b43d893e51f5" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.548s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1284.442080] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "b68042fd-4955-41e4-b14f-1f8268ace377" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.548s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1284.442734] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.547s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1284.443654] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1832ef2f-59e7-4f5e-b2eb-2625027bce47 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.446093] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "1593ca50-bc97-464a-86ca-17995db15758" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.551s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1284.450537] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-761e6e66-e02a-4bdd-a175-4d7133fb12ac {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.478628] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180076MB free_disk=177GB free_vcpus=48 pci_devices=None {{(pid=62627) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1284.478771] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1284.478958] env[62627]: DEBUG oslo_concurrency.lockutils [None 
req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1284.654526] env[62627]: DEBUG nova.compute.manager [req-d40bbdee-69cd-4fac-82d7-63ca8ea021f1 req-befb5932-87c0-46d9-94b8-a28f150460e4 service nova] [instance: 00bb8c04-3c0b-48ff-8127-13d3e15cb86c] Received event network-vif-plugged-d1a62857-50ae-4f10-8206-bfd58d9da6e0 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1284.654744] env[62627]: DEBUG oslo_concurrency.lockutils [req-d40bbdee-69cd-4fac-82d7-63ca8ea021f1 req-befb5932-87c0-46d9-94b8-a28f150460e4 service nova] Acquiring lock "00bb8c04-3c0b-48ff-8127-13d3e15cb86c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1284.654947] env[62627]: DEBUG oslo_concurrency.lockutils [req-d40bbdee-69cd-4fac-82d7-63ca8ea021f1 req-befb5932-87c0-46d9-94b8-a28f150460e4 service nova] Lock "00bb8c04-3c0b-48ff-8127-13d3e15cb86c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1284.655131] env[62627]: DEBUG oslo_concurrency.lockutils [req-d40bbdee-69cd-4fac-82d7-63ca8ea021f1 req-befb5932-87c0-46d9-94b8-a28f150460e4 service nova] Lock "00bb8c04-3c0b-48ff-8127-13d3e15cb86c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1284.655301] env[62627]: DEBUG nova.compute.manager [req-d40bbdee-69cd-4fac-82d7-63ca8ea021f1 req-befb5932-87c0-46d9-94b8-a28f150460e4 service nova] [instance: 00bb8c04-3c0b-48ff-8127-13d3e15cb86c] No waiting events found dispatching network-vif-plugged-d1a62857-50ae-4f10-8206-bfd58d9da6e0 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1284.655468] env[62627]: WARNING nova.compute.manager [req-d40bbdee-69cd-4fac-82d7-63ca8ea021f1 req-befb5932-87c0-46d9-94b8-a28f150460e4 service nova] [instance: 00bb8c04-3c0b-48ff-8127-13d3e15cb86c] Received unexpected event network-vif-plugged-d1a62857-50ae-4f10-8206-bfd58d9da6e0 for instance with vm_state building and task_state spawning. 
[ 1284.748669] env[62627]: DEBUG nova.network.neutron [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 00bb8c04-3c0b-48ff-8127-13d3e15cb86c] Successfully updated port: d1a62857-50ae-4f10-8206-bfd58d9da6e0 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1284.784099] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a78283f6-8371-4e8c-bd2a-c91671b6f263 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquiring lock "1593ca50-bc97-464a-86ca-17995db15758" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1284.784369] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a78283f6-8371-4e8c-bd2a-c91671b6f263 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lock "1593ca50-bc97-464a-86ca-17995db15758" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1284.784630] env[62627]: INFO nova.compute.manager [None req-a78283f6-8371-4e8c-bd2a-c91671b6f263 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Attaching volume ade44e27-be53-4d9a-b346-ccf17aadfded to /dev/sdb [ 1284.818413] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3daa08bc-9da8-4721-98fc-f08ece3fc52f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.825705] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8c6779b-999a-4d98-a01d-bc1d4c188543 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.839528] env[62627]: DEBUG nova.virt.block_device [None req-a78283f6-8371-4e8c-bd2a-c91671b6f263 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Updating existing volume attachment record: 0d1c766e-1b4d-49de-859f-455a93f2c91b {{(pid=62627) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1285.254072] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquiring lock "refresh_cache-00bb8c04-3c0b-48ff-8127-13d3e15cb86c" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1285.254264] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquired lock "refresh_cache-00bb8c04-3c0b-48ff-8127-13d3e15cb86c" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1285.254355] env[62627]: DEBUG nova.network.neutron [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 00bb8c04-3c0b-48ff-8127-13d3e15cb86c] Building network info cache for instance {{(pid=62627) 
_get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1285.507297] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1285.507632] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance e9d33611-e373-4303-937d-b43d893e51f5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1285.507632] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance b68042fd-4955-41e4-b14f-1f8268ace377 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1285.507713] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 1593ca50-bc97-464a-86ca-17995db15758 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1285.507825] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 7e340c99-8d15-4696-98d5-a952f5d2aeee actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1285.507909] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1285.508035] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 00bb8c04-3c0b-48ff-8127-13d3e15cb86c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1285.508229] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=62627) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1285.508367] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1856MB phys_disk=200GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=62627) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1285.601011] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5cf0f6c-33b0-4a7e-b032-b94ebe542541 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.609206] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d168c0b6-d322-4300-8096-719f53d77d7e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.638958] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b263225-94fe-4686-bf3c-50b77f5653c0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.646606] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed120877-43d0-4c8d-a79a-8a4df0b56c5a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.661179] env[62627]: DEBUG nova.compute.provider_tree [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1285.789660] env[62627]: DEBUG nova.network.neutron [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 00bb8c04-3c0b-48ff-8127-13d3e15cb86c] Instance cache missing network info. 
{{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1285.932012] env[62627]: DEBUG nova.network.neutron [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 00bb8c04-3c0b-48ff-8127-13d3e15cb86c] Updating instance_info_cache with network_info: [{"id": "d1a62857-50ae-4f10-8206-bfd58d9da6e0", "address": "fa:16:3e:c0:9a:7b", "network": {"id": "93a120a3-5be9-4822-960a-7dc2d246a740", "bridge": "br-int", "label": "tempest-ServersTestJSON-1051356861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "710e7463994b4ec08bcc6d479cd1138b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49a94c29-ddd5-4383-9219-1c2c3bb09cc5", "external-id": "nsx-vlan-transportzone-2", "segmentation_id": 2, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd1a62857-50", "ovs_interfaceid": "d1a62857-50ae-4f10-8206-bfd58d9da6e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1286.163995] env[62627]: DEBUG nova.scheduler.client.report [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1286.434719] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Releasing lock "refresh_cache-00bb8c04-3c0b-48ff-8127-13d3e15cb86c" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1286.435070] env[62627]: DEBUG nova.compute.manager [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 00bb8c04-3c0b-48ff-8127-13d3e15cb86c] Instance network_info: |[{"id": "d1a62857-50ae-4f10-8206-bfd58d9da6e0", "address": "fa:16:3e:c0:9a:7b", "network": {"id": "93a120a3-5be9-4822-960a-7dc2d246a740", "bridge": "br-int", "label": "tempest-ServersTestJSON-1051356861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "710e7463994b4ec08bcc6d479cd1138b", "mtu": 
8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49a94c29-ddd5-4383-9219-1c2c3bb09cc5", "external-id": "nsx-vlan-transportzone-2", "segmentation_id": 2, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd1a62857-50", "ovs_interfaceid": "d1a62857-50ae-4f10-8206-bfd58d9da6e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1286.435495] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 00bb8c04-3c0b-48ff-8127-13d3e15cb86c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c0:9a:7b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '49a94c29-ddd5-4383-9219-1c2c3bb09cc5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd1a62857-50ae-4f10-8206-bfd58d9da6e0', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1286.442799] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Creating folder: Project (710e7463994b4ec08bcc6d479cd1138b). Parent ref: group-v447541. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1286.443082] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e4bddf75-d184-423a-a0e8-796bd9ec4faf {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.456053] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Created folder: Project (710e7463994b4ec08bcc6d479cd1138b) in parent group-v447541. [ 1286.456180] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Creating folder: Instances. Parent ref: group-v447833. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1286.456423] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ae294d6e-c7f8-44ea-a036-a24ae469b3b8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.466245] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Created folder: Instances in parent group-v447833. [ 1286.466495] env[62627]: DEBUG oslo.service.loopingcall [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1286.466687] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 00bb8c04-3c0b-48ff-8127-13d3e15cb86c] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1286.466891] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2c0e36cf-ad51-4895-b7d1-093580bb8963 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.486552] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1286.486552] env[62627]: value = "task-2195348" [ 1286.486552] env[62627]: _type = "Task" [ 1286.486552] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1286.494178] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195348, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.669724] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62627) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1286.670091] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.191s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1286.681681] env[62627]: DEBUG nova.compute.manager [req-ecde7ed9-7810-49eb-abea-3437977941e7 req-612b0e61-1f26-4097-b6b1-8e8fd58335bf service nova] [instance: 00bb8c04-3c0b-48ff-8127-13d3e15cb86c] Received event network-changed-d1a62857-50ae-4f10-8206-bfd58d9da6e0 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1286.681935] env[62627]: DEBUG nova.compute.manager [req-ecde7ed9-7810-49eb-abea-3437977941e7 req-612b0e61-1f26-4097-b6b1-8e8fd58335bf service nova] [instance: 00bb8c04-3c0b-48ff-8127-13d3e15cb86c] Refreshing instance network info cache due to event network-changed-d1a62857-50ae-4f10-8206-bfd58d9da6e0. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1286.682238] env[62627]: DEBUG oslo_concurrency.lockutils [req-ecde7ed9-7810-49eb-abea-3437977941e7 req-612b0e61-1f26-4097-b6b1-8e8fd58335bf service nova] Acquiring lock "refresh_cache-00bb8c04-3c0b-48ff-8127-13d3e15cb86c" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1286.682423] env[62627]: DEBUG oslo_concurrency.lockutils [req-ecde7ed9-7810-49eb-abea-3437977941e7 req-612b0e61-1f26-4097-b6b1-8e8fd58335bf service nova] Acquired lock "refresh_cache-00bb8c04-3c0b-48ff-8127-13d3e15cb86c" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1286.682602] env[62627]: DEBUG nova.network.neutron [req-ecde7ed9-7810-49eb-abea-3437977941e7 req-612b0e61-1f26-4097-b6b1-8e8fd58335bf service nova] [instance: 00bb8c04-3c0b-48ff-8127-13d3e15cb86c] Refreshing network info cache for port d1a62857-50ae-4f10-8206-bfd58d9da6e0 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1286.996999] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195348, 'name': CreateVM_Task, 'duration_secs': 0.314072} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1286.997188] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 00bb8c04-3c0b-48ff-8127-13d3e15cb86c] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1286.997859] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1286.998030] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1286.998356] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1286.998640] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5aaa67e4-20f5-45ac-a95f-e00ad8dcde05 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.003225] env[62627]: DEBUG oslo_vmware.api [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1287.003225] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]526df173-dd71-9889-4b61-37a91a6e48a3" [ 1287.003225] env[62627]: _type = "Task" [ 1287.003225] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1287.010680] env[62627]: DEBUG oslo_vmware.api [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]526df173-dd71-9889-4b61-37a91a6e48a3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.386293] env[62627]: DEBUG nova.network.neutron [req-ecde7ed9-7810-49eb-abea-3437977941e7 req-612b0e61-1f26-4097-b6b1-8e8fd58335bf service nova] [instance: 00bb8c04-3c0b-48ff-8127-13d3e15cb86c] Updated VIF entry in instance network info cache for port d1a62857-50ae-4f10-8206-bfd58d9da6e0. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1287.386696] env[62627]: DEBUG nova.network.neutron [req-ecde7ed9-7810-49eb-abea-3437977941e7 req-612b0e61-1f26-4097-b6b1-8e8fd58335bf service nova] [instance: 00bb8c04-3c0b-48ff-8127-13d3e15cb86c] Updating instance_info_cache with network_info: [{"id": "d1a62857-50ae-4f10-8206-bfd58d9da6e0", "address": "fa:16:3e:c0:9a:7b", "network": {"id": "93a120a3-5be9-4822-960a-7dc2d246a740", "bridge": "br-int", "label": "tempest-ServersTestJSON-1051356861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "710e7463994b4ec08bcc6d479cd1138b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49a94c29-ddd5-4383-9219-1c2c3bb09cc5", "external-id": "nsx-vlan-transportzone-2", "segmentation_id": 2, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd1a62857-50", "ovs_interfaceid": "d1a62857-50ae-4f10-8206-bfd58d9da6e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1287.514742] env[62627]: DEBUG oslo_vmware.api [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]526df173-dd71-9889-4b61-37a91a6e48a3, 'name': SearchDatastore_Task, 'duration_secs': 0.012595} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1287.515063] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1287.515328] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 00bb8c04-3c0b-48ff-8127-13d3e15cb86c] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1287.515574] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1287.515725] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1287.515901] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1287.516181] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-57076fa9-f5d2-4af4-8540-9d0aa7ddb10f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.526099] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1287.526285] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1287.527600] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb8e1906-a1fa-46a7-b099-61e7e7b062c6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.533890] env[62627]: DEBUG oslo_vmware.api [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1287.533890] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5274a398-d037-ff96-fba9-57f0cef9688a" [ 1287.533890] env[62627]: _type = "Task" [ 1287.533890] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1287.542217] env[62627]: DEBUG oslo_vmware.api [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5274a398-d037-ff96-fba9-57f0cef9688a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.889882] env[62627]: DEBUG oslo_concurrency.lockutils [req-ecde7ed9-7810-49eb-abea-3437977941e7 req-612b0e61-1f26-4097-b6b1-8e8fd58335bf service nova] Releasing lock "refresh_cache-00bb8c04-3c0b-48ff-8127-13d3e15cb86c" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1288.045878] env[62627]: DEBUG oslo_vmware.api [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5274a398-d037-ff96-fba9-57f0cef9688a, 'name': SearchDatastore_Task, 'duration_secs': 0.008845} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1288.046638] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d7f3826e-b7cc-4b87-a43c-f4017a08d1ce {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.051792] env[62627]: DEBUG oslo_vmware.api [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1288.051792] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5258a945-7940-c8d7-8b39-f9740c8a3d2f" [ 1288.051792] env[62627]: _type = "Task" [ 1288.051792] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1288.059678] env[62627]: DEBUG oslo_vmware.api [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5258a945-7940-c8d7-8b39-f9740c8a3d2f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1288.562456] env[62627]: DEBUG oslo_vmware.api [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5258a945-7940-c8d7-8b39-f9740c8a3d2f, 'name': SearchDatastore_Task, 'duration_secs': 0.009837} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1288.562669] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1288.562930] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 00bb8c04-3c0b-48ff-8127-13d3e15cb86c/00bb8c04-3c0b-48ff-8127-13d3e15cb86c.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1288.563208] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4337f0ee-77b5-4082-aa96-903e7ae4250c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.569802] env[62627]: DEBUG oslo_vmware.api [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1288.569802] env[62627]: value = "task-2195350" [ 1288.569802] env[62627]: _type = "Task" [ 1288.569802] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1288.577936] env[62627]: DEBUG oslo_vmware.api [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195350, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.080784] env[62627]: DEBUG oslo_vmware.api [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195350, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.339581] env[62627]: DEBUG oslo_concurrency.lockutils [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquiring lock "02b9bb2d-5752-4c09-9661-5966c2ac1bad" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1289.339915] env[62627]: DEBUG oslo_concurrency.lockutils [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "02b9bb2d-5752-4c09-9661-5966c2ac1bad" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1289.581135] env[62627]: DEBUG oslo_vmware.api [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195350, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.549863} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1289.581398] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 00bb8c04-3c0b-48ff-8127-13d3e15cb86c/00bb8c04-3c0b-48ff-8127-13d3e15cb86c.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1289.581605] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 00bb8c04-3c0b-48ff-8127-13d3e15cb86c] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1289.581863] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7f44dabc-05bd-4a13-9b7f-bc164f96441c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.589150] env[62627]: DEBUG oslo_vmware.api [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1289.589150] env[62627]: value = "task-2195351" [ 1289.589150] env[62627]: _type = "Task" [ 1289.589150] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1289.598283] env[62627]: DEBUG oslo_vmware.api [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195351, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.842809] env[62627]: DEBUG nova.compute.manager [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1289.882621] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-a78283f6-8371-4e8c-bd2a-c91671b6f263 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Volume attach. Driver type: vmdk {{(pid=62627) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1289.882906] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-a78283f6-8371-4e8c-bd2a-c91671b6f263 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447832', 'volume_id': 'ade44e27-be53-4d9a-b346-ccf17aadfded', 'name': 'volume-ade44e27-be53-4d9a-b346-ccf17aadfded', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1593ca50-bc97-464a-86ca-17995db15758', 'attached_at': '', 'detached_at': '', 'volume_id': 'ade44e27-be53-4d9a-b346-ccf17aadfded', 'serial': 'ade44e27-be53-4d9a-b346-ccf17aadfded'} {{(pid=62627) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1289.884009] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e6ce2d6-765d-4f45-b643-d26534504d6c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.901071] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c05f388f-9e8e-4e74-85c3-a33bd7354957 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.925446] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-a78283f6-8371-4e8c-bd2a-c91671b6f263 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Reconfiguring VM instance instance-00000067 to attach disk [datastore2] volume-ade44e27-be53-4d9a-b346-ccf17aadfded/volume-ade44e27-be53-4d9a-b346-ccf17aadfded.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1289.925739] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-265e8dba-a3ca-4641-9fc7-b1f89d9d4001 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.945451] env[62627]: DEBUG oslo_vmware.api [None req-a78283f6-8371-4e8c-bd2a-c91671b6f263 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1289.945451] env[62627]: value = "task-2195352" [ 1289.945451] env[62627]: _type = "Task" [ 1289.945451] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1289.953637] env[62627]: DEBUG oslo_vmware.api [None req-a78283f6-8371-4e8c-bd2a-c91671b6f263 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195352, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1290.102644] env[62627]: DEBUG oslo_vmware.api [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195351, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071156} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1290.103029] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 00bb8c04-3c0b-48ff-8127-13d3e15cb86c] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1290.103712] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07c80265-e234-48d2-b901-0e9552ff2f42 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.127028] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 00bb8c04-3c0b-48ff-8127-13d3e15cb86c] Reconfiguring VM instance instance-0000006c to attach disk [datastore2] 00bb8c04-3c0b-48ff-8127-13d3e15cb86c/00bb8c04-3c0b-48ff-8127-13d3e15cb86c.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1290.127028] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e5c6b172-46f6-4894-aa1c-423d6307591f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.148254] env[62627]: DEBUG oslo_vmware.api [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1290.148254] env[62627]: value = "task-2195353" [ 1290.148254] env[62627]: _type = "Task" [ 1290.148254] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1290.157035] env[62627]: DEBUG oslo_vmware.api [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195353, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1290.368847] env[62627]: DEBUG oslo_concurrency.lockutils [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1290.369271] env[62627]: DEBUG oslo_concurrency.lockutils [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1290.371611] env[62627]: INFO nova.compute.claims [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1290.457880] env[62627]: DEBUG oslo_vmware.api [None req-a78283f6-8371-4e8c-bd2a-c91671b6f263 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195352, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1290.658282] env[62627]: DEBUG oslo_vmware.api [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195353, 'name': ReconfigVM_Task, 'duration_secs': 0.503175} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1290.658567] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 00bb8c04-3c0b-48ff-8127-13d3e15cb86c] Reconfigured VM instance instance-0000006c to attach disk [datastore2] 00bb8c04-3c0b-48ff-8127-13d3e15cb86c/00bb8c04-3c0b-48ff-8127-13d3e15cb86c.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1290.659236] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-81366bdf-1b45-4710-8fb0-b9dd2518c48e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.666565] env[62627]: DEBUG oslo_vmware.api [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1290.666565] env[62627]: value = "task-2195354" [ 1290.666565] env[62627]: _type = "Task" [ 1290.666565] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1290.674548] env[62627]: DEBUG oslo_vmware.api [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195354, 'name': Rename_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1290.956817] env[62627]: DEBUG oslo_vmware.api [None req-a78283f6-8371-4e8c-bd2a-c91671b6f263 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195352, 'name': ReconfigVM_Task, 'duration_secs': 0.686005} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1290.957574] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-a78283f6-8371-4e8c-bd2a-c91671b6f263 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Reconfigured VM instance instance-00000067 to attach disk [datastore2] volume-ade44e27-be53-4d9a-b346-ccf17aadfded/volume-ade44e27-be53-4d9a-b346-ccf17aadfded.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1290.962223] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a82f805a-8241-4eef-969e-d5cd58cbcf56 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.977564] env[62627]: DEBUG oslo_vmware.api [None req-a78283f6-8371-4e8c-bd2a-c91671b6f263 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1290.977564] env[62627]: value = "task-2195355" [ 1290.977564] env[62627]: _type = "Task" [ 1290.977564] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1290.986668] env[62627]: DEBUG oslo_vmware.api [None req-a78283f6-8371-4e8c-bd2a-c91671b6f263 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195355, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1291.177666] env[62627]: DEBUG oslo_vmware.api [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195354, 'name': Rename_Task, 'duration_secs': 0.187736} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1291.178060] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 00bb8c04-3c0b-48ff-8127-13d3e15cb86c] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1291.178247] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-35931fbb-d783-4bc9-bb19-5cca4c866443 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.186145] env[62627]: DEBUG oslo_vmware.api [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1291.186145] env[62627]: value = "task-2195356" [ 1291.186145] env[62627]: _type = "Task" [ 1291.186145] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1291.195504] env[62627]: DEBUG oslo_vmware.api [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195356, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1291.291664] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2398972c-85cc-43ae-8719-b841be9c55c9 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquiring lock "b68042fd-4955-41e4-b14f-1f8268ace377" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1291.291949] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2398972c-85cc-43ae-8719-b841be9c55c9 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lock "b68042fd-4955-41e4-b14f-1f8268ace377" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1291.292189] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2398972c-85cc-43ae-8719-b841be9c55c9 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquiring lock "b68042fd-4955-41e4-b14f-1f8268ace377-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1291.292383] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2398972c-85cc-43ae-8719-b841be9c55c9 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lock "b68042fd-4955-41e4-b14f-1f8268ace377-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1291.292558] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2398972c-85cc-43ae-8719-b841be9c55c9 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lock "b68042fd-4955-41e4-b14f-1f8268ace377-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1291.294989] env[62627]: INFO nova.compute.manager [None req-2398972c-85cc-43ae-8719-b841be9c55c9 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Terminating instance [ 1291.491347] env[62627]: DEBUG oslo_vmware.api [None req-a78283f6-8371-4e8c-bd2a-c91671b6f263 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195355, 'name': ReconfigVM_Task, 'duration_secs': 0.146478} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1291.494808] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-a78283f6-8371-4e8c-bd2a-c91671b6f263 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447832', 'volume_id': 'ade44e27-be53-4d9a-b346-ccf17aadfded', 'name': 'volume-ade44e27-be53-4d9a-b346-ccf17aadfded', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1593ca50-bc97-464a-86ca-17995db15758', 'attached_at': '', 'detached_at': '', 'volume_id': 'ade44e27-be53-4d9a-b346-ccf17aadfded', 'serial': 'ade44e27-be53-4d9a-b346-ccf17aadfded'} {{(pid=62627) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1291.508324] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8b7fd41-b422-46b3-abe4-4c2fc9d9a784 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.517267] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b073da8-735c-4dfa-bf79-1c673023f330 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.550233] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bad444a6-90c2-4465-9a53-69a965b81b6d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.558965] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78e29168-3b40-4b89-a7ee-14eae530064b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.574023] env[62627]: DEBUG nova.compute.provider_tree [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1291.699656] env[62627]: DEBUG oslo_vmware.api [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195356, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1291.799368] env[62627]: DEBUG nova.compute.manager [None req-2398972c-85cc-43ae-8719-b841be9c55c9 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Start destroying the instance on the hypervisor. 
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1291.799612] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-2398972c-85cc-43ae-8719-b841be9c55c9 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1291.800482] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-935541cc-1eee-432e-822d-6034cbbbc324 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.808421] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-2398972c-85cc-43ae-8719-b841be9c55c9 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1291.808709] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1cc6594a-0eec-4e97-b47b-97c4834ab39b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.815021] env[62627]: DEBUG oslo_vmware.api [None req-2398972c-85cc-43ae-8719-b841be9c55c9 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 1291.815021] env[62627]: value = "task-2195357" [ 1291.815021] env[62627]: _type = "Task" [ 1291.815021] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1291.822889] env[62627]: DEBUG oslo_vmware.api [None req-2398972c-85cc-43ae-8719-b841be9c55c9 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195357, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1292.076836] env[62627]: DEBUG nova.scheduler.client.report [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1292.197219] env[62627]: DEBUG oslo_vmware.api [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195356, 'name': PowerOnVM_Task, 'duration_secs': 0.593701} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1292.197569] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 00bb8c04-3c0b-48ff-8127-13d3e15cb86c] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1292.197687] env[62627]: INFO nova.compute.manager [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 00bb8c04-3c0b-48ff-8127-13d3e15cb86c] Took 7.86 seconds to spawn the instance on the hypervisor. [ 1292.197816] env[62627]: DEBUG nova.compute.manager [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 00bb8c04-3c0b-48ff-8127-13d3e15cb86c] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1292.198622] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d56748d-6741-4714-b5c3-f995ebf10f97 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.324652] env[62627]: DEBUG oslo_vmware.api [None req-2398972c-85cc-43ae-8719-b841be9c55c9 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195357, 'name': PowerOffVM_Task, 'duration_secs': 0.209089} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1292.324914] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-2398972c-85cc-43ae-8719-b841be9c55c9 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1292.325096] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-2398972c-85cc-43ae-8719-b841be9c55c9 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1292.325339] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-99f58f09-73ad-4465-8872-72c68fb71576 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.386415] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-2398972c-85cc-43ae-8719-b841be9c55c9 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1292.386703] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-2398972c-85cc-43ae-8719-b841be9c55c9 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1292.386895] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-2398972c-85cc-43ae-8719-b841be9c55c9 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Deleting the datastore file [datastore2] b68042fd-4955-41e4-b14f-1f8268ace377 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1292.387183] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-33795a7f-ce6f-4172-ac3b-8786935f123f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.394095] env[62627]: DEBUG oslo_vmware.api [None req-2398972c-85cc-43ae-8719-b841be9c55c9 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 1292.394095] env[62627]: value = "task-2195359" [ 1292.394095] env[62627]: _type = "Task" [ 1292.394095] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1292.401980] env[62627]: DEBUG oslo_vmware.api [None req-2398972c-85cc-43ae-8719-b841be9c55c9 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195359, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1292.533814] env[62627]: DEBUG nova.objects.instance [None req-a78283f6-8371-4e8c-bd2a-c91671b6f263 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lazy-loading 'flavor' on Instance uuid 1593ca50-bc97-464a-86ca-17995db15758 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1292.582201] env[62627]: DEBUG oslo_concurrency.lockutils [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.213s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1292.582592] env[62627]: DEBUG nova.compute.manager [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1292.717892] env[62627]: INFO nova.compute.manager [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 00bb8c04-3c0b-48ff-8127-13d3e15cb86c] Took 12.66 seconds to build instance. [ 1292.906026] env[62627]: DEBUG oslo_vmware.api [None req-2398972c-85cc-43ae-8719-b841be9c55c9 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195359, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.167681} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1292.906149] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-2398972c-85cc-43ae-8719-b841be9c55c9 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1292.906270] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-2398972c-85cc-43ae-8719-b841be9c55c9 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1292.906448] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-2398972c-85cc-43ae-8719-b841be9c55c9 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1292.906647] env[62627]: INFO nova.compute.manager [None req-2398972c-85cc-43ae-8719-b841be9c55c9 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1292.906912] env[62627]: DEBUG oslo.service.loopingcall [None req-2398972c-85cc-43ae-8719-b841be9c55c9 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1292.907118] env[62627]: DEBUG nova.compute.manager [-] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1292.907260] env[62627]: DEBUG nova.network.neutron [-] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1293.039554] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a78283f6-8371-4e8c-bd2a-c91671b6f263 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lock "1593ca50-bc97-464a-86ca-17995db15758" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.254s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1293.089360] env[62627]: DEBUG nova.compute.utils [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1293.091530] env[62627]: DEBUG nova.compute.manager [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Allocating IP information in the background. 
{{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1293.091530] env[62627]: DEBUG nova.network.neutron [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1293.180584] env[62627]: DEBUG nova.policy [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ade098da576147a6bff6685cb886ff78', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7a97f57bdf4a46db80460b1d5c9f9b9f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 1293.219996] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4e20259e-4a12-4b30-9a31-f564aabcb0c7 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "00bb8c04-3c0b-48ff-8127-13d3e15cb86c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.168s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1293.220338] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "00bb8c04-3c0b-48ff-8127-13d3e15cb86c" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 9.324s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1293.220451] env[62627]: INFO nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 00bb8c04-3c0b-48ff-8127-13d3e15cb86c] During sync_power_state the instance has a pending task (block_device_mapping). Skip. 
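Aside: the repeated "Acquiring lock" / "acquired ... waited" / ""released" ... held" triplets in the entries above (for "compute_resources", the instance UUID, and the per-instance "-events" locks) all come from oslo_concurrency.lockutils, whose lockutils.py path appears in each of them. The minimal sketch below shows the same primitive in its context-manager form; the function name, lock body, and timing are illustrative only and are not Nova's actual ResourceTracker code, although the log shows instance_claim is serialized on the same "compute_resources" lock name.

    import time
    from oslo_concurrency import lockutils

    def claim_resources(instance_uuid):
        """Illustrative only: serialize work the way the lockutils entries above do."""
        start = time.monotonic()
        # lockutils.lock() is the context-manager form of the synchronized
        # primitive; "compute_resources" matches the lock name in the log.
        with lockutils.lock("compute_resources"):
            waited = time.monotonic() - start
            # ... do the resource accounting while holding the lock ...
            return waited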
[ 1293.220623] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "00bb8c04-3c0b-48ff-8127-13d3e15cb86c" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1293.509367] env[62627]: DEBUG nova.compute.manager [req-98146982-13a0-480b-879c-e993f2f4e1f1 req-bbd9a731-0705-4d05-800c-79fa45516dda service nova] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Received event network-vif-deleted-9a79e592-1487-4b96-93f2-66df8b0224a4 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1293.509535] env[62627]: INFO nova.compute.manager [req-98146982-13a0-480b-879c-e993f2f4e1f1 req-bbd9a731-0705-4d05-800c-79fa45516dda service nova] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Neutron deleted interface 9a79e592-1487-4b96-93f2-66df8b0224a4; detaching it from the instance and deleting it from the info cache [ 1293.509738] env[62627]: DEBUG nova.network.neutron [req-98146982-13a0-480b-879c-e993f2f4e1f1 req-bbd9a731-0705-4d05-800c-79fa45516dda service nova] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1293.519094] env[62627]: DEBUG nova.network.neutron [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Successfully created port: 213d4e79-8676-47f0-9e73-c3a36df750d1 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1293.595523] env[62627]: DEBUG nova.compute.manager [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Start building block device mappings for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1293.951384] env[62627]: DEBUG nova.compute.manager [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Stashing vm_state: active {{(pid=62627) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1293.989143] env[62627]: DEBUG nova.network.neutron [-] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1294.011973] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-870a5b03-b426-4295-80ed-d5b7a1ba0d5c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.023501] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d3a96fb-0bc4-47b6-9bfe-56ba377758c3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.055982] env[62627]: DEBUG nova.compute.manager [req-98146982-13a0-480b-879c-e993f2f4e1f1 req-bbd9a731-0705-4d05-800c-79fa45516dda service nova] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Detach interface failed, port_id=9a79e592-1487-4b96-93f2-66df8b0224a4, reason: Instance b68042fd-4955-41e4-b14f-1f8268ace377 could not be found. {{(pid=62627) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1294.102415] env[62627]: INFO nova.virt.block_device [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Booting with volume 68c69bdc-55ac-48af-b285-e6d2d8dd861d at /dev/sda [ 1294.139799] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4ac67033-beb8-42b3-9511-087408328c06 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.149424] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32f23a87-d0ef-459a-9aff-317fd3d29146 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.178810] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bd51ff37-d282-4c32-a12f-546c691ef68c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.187890] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f1670ef-c1fd-4812-9821-203c283e3cd4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.217851] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72b1508d-ef55-4ef6-83a5-aef4f52e2f95 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.225198] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57c10f5f-ddc8-4d7e-8413-08a8328891a6 {{(pid=62627) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.240926] env[62627]: DEBUG nova.virt.block_device [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Updating existing volume attachment record: 676cde7a-f415-478c-b42f-0fbdf0e0f609 {{(pid=62627) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1294.471494] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1294.471720] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1294.491888] env[62627]: INFO nova.compute.manager [-] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Took 1.58 seconds to deallocate network for instance. [ 1294.575058] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquiring lock "680afe04-ddc8-4e14-9260-509dded09a0b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1294.575332] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "680afe04-ddc8-4e14-9260-509dded09a0b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1294.977224] env[62627]: INFO nova.compute.claims [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1294.998294] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2398972c-85cc-43ae-8719-b841be9c55c9 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1295.072233] env[62627]: DEBUG nova.compute.manager [req-bb3f7167-4dd7-4b4b-8aad-9aee01f84a68 req-4c849422-e271-4bbf-ac31-d74b7344df7b service nova] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Received event network-vif-plugged-213d4e79-8676-47f0-9e73-c3a36df750d1 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1295.072233] env[62627]: DEBUG oslo_concurrency.lockutils 
[req-bb3f7167-4dd7-4b4b-8aad-9aee01f84a68 req-4c849422-e271-4bbf-ac31-d74b7344df7b service nova] Acquiring lock "02b9bb2d-5752-4c09-9661-5966c2ac1bad-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1295.072613] env[62627]: DEBUG oslo_concurrency.lockutils [req-bb3f7167-4dd7-4b4b-8aad-9aee01f84a68 req-4c849422-e271-4bbf-ac31-d74b7344df7b service nova] Lock "02b9bb2d-5752-4c09-9661-5966c2ac1bad-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1295.072613] env[62627]: DEBUG oslo_concurrency.lockutils [req-bb3f7167-4dd7-4b4b-8aad-9aee01f84a68 req-4c849422-e271-4bbf-ac31-d74b7344df7b service nova] Lock "02b9bb2d-5752-4c09-9661-5966c2ac1bad-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1295.072882] env[62627]: DEBUG nova.compute.manager [req-bb3f7167-4dd7-4b4b-8aad-9aee01f84a68 req-4c849422-e271-4bbf-ac31-d74b7344df7b service nova] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] No waiting events found dispatching network-vif-plugged-213d4e79-8676-47f0-9e73-c3a36df750d1 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1295.072992] env[62627]: WARNING nova.compute.manager [req-bb3f7167-4dd7-4b4b-8aad-9aee01f84a68 req-4c849422-e271-4bbf-ac31-d74b7344df7b service nova] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Received unexpected event network-vif-plugged-213d4e79-8676-47f0-9e73-c3a36df750d1 for instance with vm_state building and task_state block_device_mapping. [ 1295.077847] env[62627]: DEBUG nova.compute.manager [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 680afe04-ddc8-4e14-9260-509dded09a0b] Starting instance... 
{{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1295.155682] env[62627]: DEBUG nova.network.neutron [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Successfully updated port: 213d4e79-8676-47f0-9e73-c3a36df750d1 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1295.483517] env[62627]: INFO nova.compute.resource_tracker [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Updating resource usage from migration 353cbf1f-0e8f-485e-beba-4bf4f09e0497 [ 1295.596127] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1295.605363] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aab2c93a-df6a-4178-887e-7035bc12b71c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.613448] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-556af77b-6939-4a35-b617-507bafe5dda6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.646590] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f5b8e25-2db3-48b0-9b20-90ddc5779cb9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.654761] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7301a29-17a0-4806-925e-d164190a2dfd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.659269] env[62627]: DEBUG oslo_concurrency.lockutils [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquiring lock "refresh_cache-02b9bb2d-5752-4c09-9661-5966c2ac1bad" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1295.659269] env[62627]: DEBUG oslo_concurrency.lockutils [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquired lock "refresh_cache-02b9bb2d-5752-4c09-9661-5966c2ac1bad" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1295.659269] env[62627]: DEBUG nova.network.neutron [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1295.670583] env[62627]: DEBUG nova.compute.provider_tree [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 
tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1296.173973] env[62627]: DEBUG nova.scheduler.client.report [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1296.196341] env[62627]: DEBUG nova.network.neutron [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1296.325073] env[62627]: DEBUG nova.compute.manager [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Start spawning the instance on the hypervisor. {{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1296.325624] env[62627]: DEBUG nova.virt.hardware [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1296.325842] env[62627]: DEBUG nova.virt.hardware [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1296.326013] env[62627]: DEBUG nova.virt.hardware [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1296.326222] env[62627]: DEBUG nova.virt.hardware [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Flavor pref 0:0:0 {{(pid=62627) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1296.326374] env[62627]: DEBUG nova.virt.hardware [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1296.326522] env[62627]: DEBUG nova.virt.hardware [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1296.326723] env[62627]: DEBUG nova.virt.hardware [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1296.326882] env[62627]: DEBUG nova.virt.hardware [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1296.327239] env[62627]: DEBUG nova.virt.hardware [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1296.327460] env[62627]: DEBUG nova.virt.hardware [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1296.327642] env[62627]: DEBUG nova.virt.hardware [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1296.328531] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5544a46c-3d0d-4ecc-9593-7ca5cfadd629 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.337440] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d250899-9659-4f24-9b7d-6a95f1876fe7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.582661] env[62627]: DEBUG nova.network.neutron [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Updating instance_info_cache with network_info: [{"id": "213d4e79-8676-47f0-9e73-c3a36df750d1", "address": "fa:16:3e:38:51:83", "network": {"id": "1e37275e-31eb-4a79-8ff1-1b105d16fe3d", "bridge": "br-int", "label": 
"tempest-ServerActionsTestOtherA-59116622-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7a97f57bdf4a46db80460b1d5c9f9b9f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f499bc9-78da-46c1-9274-19edf26d31cb", "external-id": "nsx-vlan-transportzone-243", "segmentation_id": 243, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap213d4e79-86", "ovs_interfaceid": "213d4e79-8676-47f0-9e73-c3a36df750d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1296.678692] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.207s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1296.678929] env[62627]: INFO nova.compute.manager [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Migrating [ 1296.689714] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2398972c-85cc-43ae-8719-b841be9c55c9 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.692s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1296.689940] env[62627]: DEBUG nova.objects.instance [None req-2398972c-85cc-43ae-8719-b841be9c55c9 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lazy-loading 'resources' on Instance uuid b68042fd-4955-41e4-b14f-1f8268ace377 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1297.085386] env[62627]: DEBUG oslo_concurrency.lockutils [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Releasing lock "refresh_cache-02b9bb2d-5752-4c09-9661-5966c2ac1bad" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1297.085784] env[62627]: DEBUG nova.compute.manager [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Instance network_info: |[{"id": "213d4e79-8676-47f0-9e73-c3a36df750d1", "address": "fa:16:3e:38:51:83", "network": {"id": "1e37275e-31eb-4a79-8ff1-1b105d16fe3d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-59116622-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": 
"gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7a97f57bdf4a46db80460b1d5c9f9b9f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f499bc9-78da-46c1-9274-19edf26d31cb", "external-id": "nsx-vlan-transportzone-243", "segmentation_id": 243, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap213d4e79-86", "ovs_interfaceid": "213d4e79-8676-47f0-9e73-c3a36df750d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1297.086219] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:38:51:83', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3f499bc9-78da-46c1-9274-19edf26d31cb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '213d4e79-8676-47f0-9e73-c3a36df750d1', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1297.093870] env[62627]: DEBUG oslo.service.loopingcall [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1297.094093] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1297.094314] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2faf01e1-9b10-4384-a0d1-d41d61c22fa6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.110358] env[62627]: DEBUG nova.compute.manager [req-7ee6fad8-78c3-40b2-bbe2-fc189bc425cc req-d8f564d7-6420-47a4-94db-ecf2cdf7377c service nova] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Received event network-changed-213d4e79-8676-47f0-9e73-c3a36df750d1 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1297.110531] env[62627]: DEBUG nova.compute.manager [req-7ee6fad8-78c3-40b2-bbe2-fc189bc425cc req-d8f564d7-6420-47a4-94db-ecf2cdf7377c service nova] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Refreshing instance network info cache due to event network-changed-213d4e79-8676-47f0-9e73-c3a36df750d1. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1297.110743] env[62627]: DEBUG oslo_concurrency.lockutils [req-7ee6fad8-78c3-40b2-bbe2-fc189bc425cc req-d8f564d7-6420-47a4-94db-ecf2cdf7377c service nova] Acquiring lock "refresh_cache-02b9bb2d-5752-4c09-9661-5966c2ac1bad" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1297.110887] env[62627]: DEBUG oslo_concurrency.lockutils [req-7ee6fad8-78c3-40b2-bbe2-fc189bc425cc req-d8f564d7-6420-47a4-94db-ecf2cdf7377c service nova] Acquired lock "refresh_cache-02b9bb2d-5752-4c09-9661-5966c2ac1bad" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1297.111097] env[62627]: DEBUG nova.network.neutron [req-7ee6fad8-78c3-40b2-bbe2-fc189bc425cc req-d8f564d7-6420-47a4-94db-ecf2cdf7377c service nova] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Refreshing network info cache for port 213d4e79-8676-47f0-9e73-c3a36df750d1 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1297.118182] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1297.118182] env[62627]: value = "task-2195360" [ 1297.118182] env[62627]: _type = "Task" [ 1297.118182] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1297.127581] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195360, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1297.196263] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquiring lock "refresh_cache-1593ca50-bc97-464a-86ca-17995db15758" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1297.196458] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquired lock "refresh_cache-1593ca50-bc97-464a-86ca-17995db15758" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1297.196677] env[62627]: DEBUG nova.network.neutron [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1297.322915] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62e92fad-737e-4191-9c68-acec5cc21986 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.330782] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7810e97c-a4e8-4ca2-9393-a1894edb3955 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.361270] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cd13142-67e8-4bf3-be54-55280a33ede0 {{(pid=62627) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.369310] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47c48e20-3c8b-4c32-ab88-fa15856c35a9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.384372] env[62627]: DEBUG nova.compute.provider_tree [None req-2398972c-85cc-43ae-8719-b841be9c55c9 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1297.628872] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195360, 'name': CreateVM_Task, 'duration_secs': 0.45673} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1297.630032] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1297.630244] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'delete_on_termination': True, 'attachment_id': '676cde7a-f415-478c-b42f-0fbdf0e0f609', 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447830', 'volume_id': '68c69bdc-55ac-48af-b285-e6d2d8dd861d', 'name': 'volume-68c69bdc-55ac-48af-b285-e6d2d8dd861d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '02b9bb2d-5752-4c09-9661-5966c2ac1bad', 'attached_at': '', 'detached_at': '', 'volume_id': '68c69bdc-55ac-48af-b285-e6d2d8dd861d', 'serial': '68c69bdc-55ac-48af-b285-e6d2d8dd861d'}, 'boot_index': 0, 'mount_device': '/dev/sda', 'disk_bus': None, 'device_type': None, 'guest_format': None, 'volume_type': None}], 'swap': None} {{(pid=62627) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1297.630467] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Root volume attach. 
Driver type: vmdk {{(pid=62627) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1297.631279] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72cedd80-9361-4855-a9f9-9d9f8e76a70b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.642736] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96c5bd96-9fbd-4bf9-97d0-5e3259970ec3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.649778] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb51f0c7-85db-4ce1-8d27-80439e08b196 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.656170] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-557654f6-994f-4927-92c9-2a874522833f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.662721] env[62627]: DEBUG oslo_vmware.api [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1297.662721] env[62627]: value = "task-2195361" [ 1297.662721] env[62627]: _type = "Task" [ 1297.662721] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1297.680077] env[62627]: DEBUG oslo_vmware.api [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195361, 'name': RelocateVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1297.811069] env[62627]: DEBUG nova.network.neutron [req-7ee6fad8-78c3-40b2-bbe2-fc189bc425cc req-d8f564d7-6420-47a4-94db-ecf2cdf7377c service nova] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Updated VIF entry in instance network info cache for port 213d4e79-8676-47f0-9e73-c3a36df750d1. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1297.811445] env[62627]: DEBUG nova.network.neutron [req-7ee6fad8-78c3-40b2-bbe2-fc189bc425cc req-d8f564d7-6420-47a4-94db-ecf2cdf7377c service nova] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Updating instance_info_cache with network_info: [{"id": "213d4e79-8676-47f0-9e73-c3a36df750d1", "address": "fa:16:3e:38:51:83", "network": {"id": "1e37275e-31eb-4a79-8ff1-1b105d16fe3d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-59116622-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7a97f57bdf4a46db80460b1d5c9f9b9f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f499bc9-78da-46c1-9274-19edf26d31cb", "external-id": "nsx-vlan-transportzone-243", "segmentation_id": 243, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap213d4e79-86", "ovs_interfaceid": "213d4e79-8676-47f0-9e73-c3a36df750d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1297.888218] env[62627]: DEBUG nova.scheduler.client.report [None req-2398972c-85cc-43ae-8719-b841be9c55c9 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1297.931387] env[62627]: DEBUG nova.network.neutron [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Updating instance_info_cache with network_info: [{"id": "57af03b0-8d4d-4f61-a384-800617af9be4", "address": "fa:16:3e:02:a2:5f", "network": {"id": "e5d8177b-3afb-46cb-88d9-d4777ffa4a39", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1184692155-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.160", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f3a36c557145e38c528a628b1f3b67", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"e7a44713-0af1-486e-bc0d-00e03a769fa4", "external-id": "nsx-vlan-transportzone-420", "segmentation_id": 420, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57af03b0-8d", "ovs_interfaceid": "57af03b0-8d4d-4f61-a384-800617af9be4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1298.174745] env[62627]: DEBUG oslo_vmware.api [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195361, 'name': RelocateVM_Task, 'duration_secs': 0.383292} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1298.174745] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Volume attach. Driver type: vmdk {{(pid=62627) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1298.174745] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447830', 'volume_id': '68c69bdc-55ac-48af-b285-e6d2d8dd861d', 'name': 'volume-68c69bdc-55ac-48af-b285-e6d2d8dd861d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '02b9bb2d-5752-4c09-9661-5966c2ac1bad', 'attached_at': '', 'detached_at': '', 'volume_id': '68c69bdc-55ac-48af-b285-e6d2d8dd861d', 'serial': '68c69bdc-55ac-48af-b285-e6d2d8dd861d'} {{(pid=62627) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1298.174745] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd880eaf-c1e8-4eb7-889c-33f769cd75ff {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.190535] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29f44d01-8e4d-431d-a838-b3d16fd2c2fc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.213360] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Reconfiguring VM instance instance-0000006d to attach disk [datastore2] volume-68c69bdc-55ac-48af-b285-e6d2d8dd861d/volume-68c69bdc-55ac-48af-b285-e6d2d8dd861d.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1298.213639] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b9a77557-7492-4efa-9add-d0b1fb6a1f9d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.234883] env[62627]: DEBUG oslo_vmware.api 
[None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1298.234883] env[62627]: value = "task-2195362" [ 1298.234883] env[62627]: _type = "Task" [ 1298.234883] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1298.245409] env[62627]: DEBUG oslo_vmware.api [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195362, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1298.314907] env[62627]: DEBUG oslo_concurrency.lockutils [req-7ee6fad8-78c3-40b2-bbe2-fc189bc425cc req-d8f564d7-6420-47a4-94db-ecf2cdf7377c service nova] Releasing lock "refresh_cache-02b9bb2d-5752-4c09-9661-5966c2ac1bad" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1298.396262] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2398972c-85cc-43ae-8719-b841be9c55c9 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.706s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1298.398901] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.803s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1298.400506] env[62627]: INFO nova.compute.claims [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 680afe04-ddc8-4e14-9260-509dded09a0b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1298.417092] env[62627]: INFO nova.scheduler.client.report [None req-2398972c-85cc-43ae-8719-b841be9c55c9 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Deleted allocations for instance b68042fd-4955-41e4-b14f-1f8268ace377 [ 1298.434390] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Releasing lock "refresh_cache-1593ca50-bc97-464a-86ca-17995db15758" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1298.746729] env[62627]: DEBUG oslo_vmware.api [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195362, 'name': ReconfigVM_Task, 'duration_secs': 0.255193} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1298.747072] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Reconfigured VM instance instance-0000006d to attach disk [datastore2] volume-68c69bdc-55ac-48af-b285-e6d2d8dd861d/volume-68c69bdc-55ac-48af-b285-e6d2d8dd861d.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1298.751711] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d57846da-2d14-433d-8689-bd1a6aafd4fe {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.768650] env[62627]: DEBUG oslo_vmware.api [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1298.768650] env[62627]: value = "task-2195363" [ 1298.768650] env[62627]: _type = "Task" [ 1298.768650] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1298.778335] env[62627]: DEBUG oslo_vmware.api [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195363, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1298.926358] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2398972c-85cc-43ae-8719-b841be9c55c9 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lock "b68042fd-4955-41e4-b14f-1f8268ace377" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 7.634s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1299.279403] env[62627]: DEBUG oslo_vmware.api [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195363, 'name': ReconfigVM_Task, 'duration_secs': 0.128137} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1299.279752] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447830', 'volume_id': '68c69bdc-55ac-48af-b285-e6d2d8dd861d', 'name': 'volume-68c69bdc-55ac-48af-b285-e6d2d8dd861d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '02b9bb2d-5752-4c09-9661-5966c2ac1bad', 'attached_at': '', 'detached_at': '', 'volume_id': '68c69bdc-55ac-48af-b285-e6d2d8dd861d', 'serial': '68c69bdc-55ac-48af-b285-e6d2d8dd861d'} {{(pid=62627) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1299.280348] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4f011b0b-e27b-46c3-a417-35aa1835ccbd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.289790] env[62627]: DEBUG oslo_vmware.api [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1299.289790] env[62627]: value = "task-2195364" [ 1299.289790] env[62627]: _type = "Task" [ 1299.289790] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1299.305583] env[62627]: DEBUG oslo_vmware.api [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195364, 'name': Rename_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1299.616459] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94c4d469-5882-40fe-be9f-6639357b555f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.624313] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba2b6021-8762-4c04-9d3c-a7d5a499cba3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.655405] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4727adc3-ec23-408a-9a6f-f882c13e0f93 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.663263] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-186d8d7c-e405-4dfa-876a-4c6f2dfe6c79 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.676614] env[62627]: DEBUG nova.compute.provider_tree [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1299.799651] env[62627]: DEBUG oslo_vmware.api [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195364, 'name': Rename_Task, 'duration_secs': 0.14473} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1299.799979] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1299.800244] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-14ec0b1e-031b-4f53-91e8-5c98e43781f0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.808294] env[62627]: DEBUG oslo_vmware.api [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1299.808294] env[62627]: value = "task-2195365" [ 1299.808294] env[62627]: _type = "Task" [ 1299.808294] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1299.821788] env[62627]: DEBUG oslo_vmware.api [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195365, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1299.948122] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3de993ee-1037-4074-91db-22aaa70ca988 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.971195] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Updating instance '1593ca50-bc97-464a-86ca-17995db15758' progress to 0 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1300.180149] env[62627]: DEBUG nova.scheduler.client.report [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1300.321204] env[62627]: DEBUG oslo_vmware.api [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195365, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.323661] env[62627]: DEBUG nova.compute.manager [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Stashing vm_state: active {{(pid=62627) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1300.477123] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1300.477292] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cb3ddb62-fda2-477d-9594-2b444aace5a3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.485407] env[62627]: DEBUG oslo_vmware.api [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1300.485407] env[62627]: value = "task-2195366" [ 1300.485407] env[62627]: _type = "Task" [ 1300.485407] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1300.495892] env[62627]: DEBUG oslo_vmware.api [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195366, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.685590] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.287s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1300.686140] env[62627]: DEBUG nova.compute.manager [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 680afe04-ddc8-4e14-9260-509dded09a0b] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1300.820197] env[62627]: DEBUG oslo_vmware.api [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195365, 'name': PowerOnVM_Task, 'duration_secs': 0.662773} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1300.821116] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1300.821116] env[62627]: INFO nova.compute.manager [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Took 4.50 seconds to spawn the instance on the hypervisor. 
[ 1300.821213] env[62627]: DEBUG nova.compute.manager [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1300.821973] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e54378f-34ae-4f50-a6e0-7726b71b30ca {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.846416] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1300.846677] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1300.996220] env[62627]: DEBUG oslo_vmware.api [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195366, 'name': PowerOffVM_Task, 'duration_secs': 0.414506} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1300.996544] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1300.996794] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Updating instance '1593ca50-bc97-464a-86ca-17995db15758' progress to 17 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1301.193075] env[62627]: DEBUG nova.compute.utils [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1301.194309] env[62627]: DEBUG nova.compute.manager [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 680afe04-ddc8-4e14-9260-509dded09a0b] Allocating IP information in the background. 
{{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1301.194476] env[62627]: DEBUG nova.network.neutron [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 680afe04-ddc8-4e14-9260-509dded09a0b] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1301.346093] env[62627]: INFO nova.compute.manager [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Took 11.00 seconds to build instance. [ 1301.351391] env[62627]: INFO nova.compute.claims [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1301.500435] env[62627]: DEBUG nova.policy [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd847ab3ec20743d0997defea8008448e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '710e7463994b4ec08bcc6d479cd1138b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 1301.503555] env[62627]: DEBUG nova.virt.hardware [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1301.503772] env[62627]: DEBUG nova.virt.hardware [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1301.503928] env[62627]: DEBUG nova.virt.hardware [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1301.504119] env[62627]: DEBUG nova.virt.hardware [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1301.504269] env[62627]: DEBUG nova.virt.hardware [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1301.504419] env[62627]: DEBUG nova.virt.hardware [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1301.504615] env[62627]: DEBUG nova.virt.hardware [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1301.504770] env[62627]: DEBUG nova.virt.hardware [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1301.504933] env[62627]: DEBUG nova.virt.hardware [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1301.505108] env[62627]: DEBUG nova.virt.hardware [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1301.505278] env[62627]: DEBUG nova.virt.hardware [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1301.510614] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d57c2316-0c20-4253-a6d3-a6603929cfd0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.527821] env[62627]: DEBUG oslo_vmware.api [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1301.527821] env[62627]: value = "task-2195367" [ 1301.527821] env[62627]: _type = "Task" [ 1301.527821] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1301.539727] env[62627]: DEBUG oslo_vmware.api [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195367, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1301.698217] env[62627]: DEBUG nova.compute.manager [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 680afe04-ddc8-4e14-9260-509dded09a0b] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1301.847590] env[62627]: DEBUG oslo_concurrency.lockutils [None req-53fef0e2-adea-4e2c-b3a7-7724cdf467f0 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "02b9bb2d-5752-4c09-9661-5966c2ac1bad" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 12.508s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1301.857492] env[62627]: INFO nova.compute.resource_tracker [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Updating resource usage from migration bd33cf80-bb50-408b-8395-b6c800449c22 [ 1301.990368] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02b2c4a2-e478-4bb9-bf5b-15ebf48a7c6f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.999430] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-513f86ef-7362-452d-a770-1c5100bfa1e9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.046337] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d7802ac-a43c-4957-b247-222e9868457d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.057831] env[62627]: DEBUG oslo_vmware.api [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195367, 'name': ReconfigVM_Task, 'duration_secs': 0.174191} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1302.060612] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Updating instance '1593ca50-bc97-464a-86ca-17995db15758' progress to 33 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1302.066441] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59104307-9a5f-4bca-80db-c26ebd13c2ea {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.073419] env[62627]: DEBUG nova.network.neutron [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 680afe04-ddc8-4e14-9260-509dded09a0b] Successfully created port: 00c086aa-5c1a-4b1e-8f68-aea4434492ea {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1302.088716] env[62627]: DEBUG nova.compute.provider_tree [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1302.328645] env[62627]: DEBUG nova.compute.manager [req-11af203c-4974-4728-93d8-d218b33cbc35 req-62689f9a-95b7-4583-a15d-24634b4040a0 service nova] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] Received event network-changed-b0e730ac-28a5-40f0-bbe1-c4f48f060624 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1302.328862] env[62627]: DEBUG nova.compute.manager [req-11af203c-4974-4728-93d8-d218b33cbc35 req-62689f9a-95b7-4583-a15d-24634b4040a0 service nova] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] Refreshing instance network info cache due to event network-changed-b0e730ac-28a5-40f0-bbe1-c4f48f060624. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1302.329094] env[62627]: DEBUG oslo_concurrency.lockutils [req-11af203c-4974-4728-93d8-d218b33cbc35 req-62689f9a-95b7-4583-a15d-24634b4040a0 service nova] Acquiring lock "refresh_cache-6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1302.329240] env[62627]: DEBUG oslo_concurrency.lockutils [req-11af203c-4974-4728-93d8-d218b33cbc35 req-62689f9a-95b7-4583-a15d-24634b4040a0 service nova] Acquired lock "refresh_cache-6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1302.329402] env[62627]: DEBUG nova.network.neutron [req-11af203c-4974-4728-93d8-d218b33cbc35 req-62689f9a-95b7-4583-a15d-24634b4040a0 service nova] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] Refreshing network info cache for port b0e730ac-28a5-40f0-bbe1-c4f48f060624 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1302.574926] env[62627]: DEBUG nova.virt.hardware [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1302.575090] env[62627]: DEBUG nova.virt.hardware [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1302.575868] env[62627]: DEBUG nova.virt.hardware [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1302.575868] env[62627]: DEBUG nova.virt.hardware [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1302.575868] env[62627]: DEBUG nova.virt.hardware [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1302.575868] env[62627]: DEBUG nova.virt.hardware [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1302.575868] env[62627]: DEBUG nova.virt.hardware [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1302.576091] env[62627]: DEBUG nova.virt.hardware [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1302.576187] env[62627]: DEBUG nova.virt.hardware [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1302.576355] env[62627]: DEBUG nova.virt.hardware [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1302.576525] env[62627]: DEBUG nova.virt.hardware [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1302.583032] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Reconfiguring VM instance instance-00000067 to detach disk 2000 {{(pid=62627) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1302.583032] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6b8468cc-a1db-42c9-b4be-35bb4d8fbffa {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.599021] env[62627]: DEBUG nova.scheduler.client.report [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1302.608694] env[62627]: DEBUG oslo_vmware.api [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1302.608694] env[62627]: value = "task-2195368" [ 
1302.608694] env[62627]: _type = "Task" [ 1302.608694] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1302.620997] env[62627]: DEBUG oslo_vmware.api [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195368, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1302.707716] env[62627]: DEBUG nova.compute.manager [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 680afe04-ddc8-4e14-9260-509dded09a0b] Start spawning the instance on the hypervisor. {{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1302.739017] env[62627]: DEBUG nova.virt.hardware [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1302.739293] env[62627]: DEBUG nova.virt.hardware [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1302.739449] env[62627]: DEBUG nova.virt.hardware [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1302.739634] env[62627]: DEBUG nova.virt.hardware [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1302.739786] env[62627]: DEBUG nova.virt.hardware [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1302.739932] env[62627]: DEBUG nova.virt.hardware [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1302.740164] env[62627]: DEBUG nova.virt.hardware 
[None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1302.740325] env[62627]: DEBUG nova.virt.hardware [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1302.740489] env[62627]: DEBUG nova.virt.hardware [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1302.740650] env[62627]: DEBUG nova.virt.hardware [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1302.740819] env[62627]: DEBUG nova.virt.hardware [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1302.741739] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d47f2c35-1d2c-4024-8ab8-53bd5d5ca3f0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.750715] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53463546-6f82-4722-843a-3b7180c27798 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.043597] env[62627]: DEBUG nova.network.neutron [req-11af203c-4974-4728-93d8-d218b33cbc35 req-62689f9a-95b7-4583-a15d-24634b4040a0 service nova] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] Updated VIF entry in instance network info cache for port b0e730ac-28a5-40f0-bbe1-c4f48f060624. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1303.043996] env[62627]: DEBUG nova.network.neutron [req-11af203c-4974-4728-93d8-d218b33cbc35 req-62689f9a-95b7-4583-a15d-24634b4040a0 service nova] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] Updating instance_info_cache with network_info: [{"id": "b0e730ac-28a5-40f0-bbe1-c4f48f060624", "address": "fa:16:3e:66:a3:d6", "network": {"id": "1e37275e-31eb-4a79-8ff1-1b105d16fe3d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-59116622-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7a97f57bdf4a46db80460b1d5c9f9b9f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f499bc9-78da-46c1-9274-19edf26d31cb", "external-id": "nsx-vlan-transportzone-243", "segmentation_id": 243, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0e730ac-28", "ovs_interfaceid": "b0e730ac-28a5-40f0-bbe1-c4f48f060624", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1303.100660] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.254s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1303.100932] env[62627]: INFO nova.compute.manager [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Migrating [ 1303.125599] env[62627]: DEBUG oslo_vmware.api [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195368, 'name': ReconfigVM_Task, 'duration_secs': 0.206521} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1303.125904] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Reconfigured VM instance instance-00000067 to detach disk 2000 {{(pid=62627) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1303.126744] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b3162be-b4ed-4a19-88f0-155eee6ba270 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.155992] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Reconfiguring VM instance instance-00000067 to attach disk [datastore2] 1593ca50-bc97-464a-86ca-17995db15758/1593ca50-bc97-464a-86ca-17995db15758.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1303.156333] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0fa27af7-1002-4f31-8dbc-fc23bada8363 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.176567] env[62627]: DEBUG oslo_vmware.api [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1303.176567] env[62627]: value = "task-2195369" [ 1303.176567] env[62627]: _type = "Task" [ 1303.176567] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1303.186290] env[62627]: DEBUG oslo_vmware.api [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195369, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1303.546823] env[62627]: DEBUG oslo_concurrency.lockutils [req-11af203c-4974-4728-93d8-d218b33cbc35 req-62689f9a-95b7-4583-a15d-24634b4040a0 service nova] Releasing lock "refresh_cache-6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1303.615811] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquiring lock "refresh_cache-e9d33611-e373-4303-937d-b43d893e51f5" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1303.615996] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquired lock "refresh_cache-e9d33611-e373-4303-937d-b43d893e51f5" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1303.616191] env[62627]: DEBUG nova.network.neutron [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1303.688103] env[62627]: DEBUG oslo_vmware.api [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195369, 'name': ReconfigVM_Task, 'duration_secs': 0.357596} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1303.688414] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Reconfigured VM instance instance-00000067 to attach disk [datastore2] 1593ca50-bc97-464a-86ca-17995db15758/1593ca50-bc97-464a-86ca-17995db15758.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1303.688759] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Updating instance '1593ca50-bc97-464a-86ca-17995db15758' progress to 50 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1303.734603] env[62627]: DEBUG nova.network.neutron [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 680afe04-ddc8-4e14-9260-509dded09a0b] Successfully updated port: 00c086aa-5c1a-4b1e-8f68-aea4434492ea {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1303.777559] env[62627]: DEBUG nova.compute.manager [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Stashing vm_state: active {{(pid=62627) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1304.196394] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19517d95-b9aa-463b-833b-fc876214de08 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.222336] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-485812f8-556d-4f84-8828-5789d1ca8683 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.244754] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquiring lock "refresh_cache-680afe04-ddc8-4e14-9260-509dded09a0b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1304.244915] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquired lock "refresh_cache-680afe04-ddc8-4e14-9260-509dded09a0b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1304.245077] env[62627]: DEBUG nova.network.neutron [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 680afe04-ddc8-4e14-9260-509dded09a0b] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1304.246367] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 
tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Updating instance '1593ca50-bc97-464a-86ca-17995db15758' progress to 67 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1304.317291] env[62627]: DEBUG oslo_concurrency.lockutils [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1304.317583] env[62627]: DEBUG oslo_concurrency.lockutils [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1304.361710] env[62627]: DEBUG nova.compute.manager [req-9892c3e6-c74e-4049-9359-7f90cc593d95 req-3a38950d-5c26-4c5f-8ca6-e7906ce9b921 service nova] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Received event network-changed-213d4e79-8676-47f0-9e73-c3a36df750d1 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1304.361915] env[62627]: DEBUG nova.compute.manager [req-9892c3e6-c74e-4049-9359-7f90cc593d95 req-3a38950d-5c26-4c5f-8ca6-e7906ce9b921 service nova] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Refreshing instance network info cache due to event network-changed-213d4e79-8676-47f0-9e73-c3a36df750d1. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1304.362231] env[62627]: DEBUG oslo_concurrency.lockutils [req-9892c3e6-c74e-4049-9359-7f90cc593d95 req-3a38950d-5c26-4c5f-8ca6-e7906ce9b921 service nova] Acquiring lock "refresh_cache-02b9bb2d-5752-4c09-9661-5966c2ac1bad" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1304.362387] env[62627]: DEBUG oslo_concurrency.lockutils [req-9892c3e6-c74e-4049-9359-7f90cc593d95 req-3a38950d-5c26-4c5f-8ca6-e7906ce9b921 service nova] Acquired lock "refresh_cache-02b9bb2d-5752-4c09-9661-5966c2ac1bad" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1304.362552] env[62627]: DEBUG nova.network.neutron [req-9892c3e6-c74e-4049-9359-7f90cc593d95 req-3a38950d-5c26-4c5f-8ca6-e7906ce9b921 service nova] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Refreshing network info cache for port 213d4e79-8676-47f0-9e73-c3a36df750d1 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1304.421576] env[62627]: DEBUG nova.network.neutron [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Updating instance_info_cache with network_info: [{"id": "5825a537-f659-4e7e-907e-e22208e06c0c", "address": "fa:16:3e:a9:0b:9d", "network": {"id": "da26ed97-a6a7-47f1-b6c0-d1b2f45ad202", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-265963183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": 
"192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.167", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97c11d791dcf4f94a8e8bda7a6f014cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6eaa481-1f92-4851-b98e-09ed0daad7cb", "external-id": "nsx-vlan-transportzone-636", "segmentation_id": 636, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5825a537-f6", "ovs_interfaceid": "5825a537-f659-4e7e-907e-e22208e06c0c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1304.784687] env[62627]: DEBUG nova.network.neutron [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 680afe04-ddc8-4e14-9260-509dded09a0b] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1304.822432] env[62627]: INFO nova.compute.claims [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1304.924123] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Releasing lock "refresh_cache-e9d33611-e373-4303-937d-b43d893e51f5" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1305.001229] env[62627]: DEBUG nova.network.neutron [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 680afe04-ddc8-4e14-9260-509dded09a0b] Updating instance_info_cache with network_info: [{"id": "00c086aa-5c1a-4b1e-8f68-aea4434492ea", "address": "fa:16:3e:ee:72:06", "network": {"id": "93a120a3-5be9-4822-960a-7dc2d246a740", "bridge": "br-int", "label": "tempest-ServersTestJSON-1051356861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "710e7463994b4ec08bcc6d479cd1138b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49a94c29-ddd5-4383-9219-1c2c3bb09cc5", "external-id": "nsx-vlan-transportzone-2", "segmentation_id": 2, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap00c086aa-5c", "ovs_interfaceid": "00c086aa-5c1a-4b1e-8f68-aea4434492ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1305.160962] env[62627]: DEBUG nova.network.neutron [req-9892c3e6-c74e-4049-9359-7f90cc593d95 req-3a38950d-5c26-4c5f-8ca6-e7906ce9b921 service nova] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Updated VIF entry in instance network info cache for port 213d4e79-8676-47f0-9e73-c3a36df750d1. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1305.161089] env[62627]: DEBUG nova.network.neutron [req-9892c3e6-c74e-4049-9359-7f90cc593d95 req-3a38950d-5c26-4c5f-8ca6-e7906ce9b921 service nova] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Updating instance_info_cache with network_info: [{"id": "213d4e79-8676-47f0-9e73-c3a36df750d1", "address": "fa:16:3e:38:51:83", "network": {"id": "1e37275e-31eb-4a79-8ff1-1b105d16fe3d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-59116622-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.147", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7a97f57bdf4a46db80460b1d5c9f9b9f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f499bc9-78da-46c1-9274-19edf26d31cb", "external-id": "nsx-vlan-transportzone-243", "segmentation_id": 243, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap213d4e79-86", "ovs_interfaceid": "213d4e79-8676-47f0-9e73-c3a36df750d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1305.328744] env[62627]: INFO nova.compute.resource_tracker [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Updating resource usage from migration 98c06cf0-c052-4b55-bb5e-c646ddabcc85 [ 1305.465274] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b193033b-f90d-4743-8057-bae3673fd498 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.474723] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29b0c22f-4ff7-40d9-a132-c1ec350a9649 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.505336] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9340dd6-e5d1-45ec-9d11-3cea22156823 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.507915] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Releasing lock "refresh_cache-680afe04-ddc8-4e14-9260-509dded09a0b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 
1305.508219] env[62627]: DEBUG nova.compute.manager [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 680afe04-ddc8-4e14-9260-509dded09a0b] Instance network_info: |[{"id": "00c086aa-5c1a-4b1e-8f68-aea4434492ea", "address": "fa:16:3e:ee:72:06", "network": {"id": "93a120a3-5be9-4822-960a-7dc2d246a740", "bridge": "br-int", "label": "tempest-ServersTestJSON-1051356861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "710e7463994b4ec08bcc6d479cd1138b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49a94c29-ddd5-4383-9219-1c2c3bb09cc5", "external-id": "nsx-vlan-transportzone-2", "segmentation_id": 2, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap00c086aa-5c", "ovs_interfaceid": "00c086aa-5c1a-4b1e-8f68-aea4434492ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1305.508598] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 680afe04-ddc8-4e14-9260-509dded09a0b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ee:72:06', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '49a94c29-ddd5-4383-9219-1c2c3bb09cc5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '00c086aa-5c1a-4b1e-8f68-aea4434492ea', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1305.515756] env[62627]: DEBUG oslo.service.loopingcall [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1305.516555] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 680afe04-ddc8-4e14-9260-509dded09a0b] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1305.516780] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0735d4a8-2e6e-4daa-9d68-b89705e416aa {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.533400] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-201fbc3d-e956-404c-86a6-feb634781490 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.538563] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1305.538563] env[62627]: value = "task-2195370" [ 1305.538563] env[62627]: _type = "Task" [ 1305.538563] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1305.549033] env[62627]: DEBUG nova.compute.provider_tree [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1305.555470] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195370, 'name': CreateVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1305.663641] env[62627]: DEBUG oslo_concurrency.lockutils [req-9892c3e6-c74e-4049-9359-7f90cc593d95 req-3a38950d-5c26-4c5f-8ca6-e7906ce9b921 service nova] Releasing lock "refresh_cache-02b9bb2d-5752-4c09-9661-5966c2ac1bad" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1305.663863] env[62627]: DEBUG nova.compute.manager [req-9892c3e6-c74e-4049-9359-7f90cc593d95 req-3a38950d-5c26-4c5f-8ca6-e7906ce9b921 service nova] [instance: 680afe04-ddc8-4e14-9260-509dded09a0b] Received event network-vif-plugged-00c086aa-5c1a-4b1e-8f68-aea4434492ea {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1305.664122] env[62627]: DEBUG oslo_concurrency.lockutils [req-9892c3e6-c74e-4049-9359-7f90cc593d95 req-3a38950d-5c26-4c5f-8ca6-e7906ce9b921 service nova] Acquiring lock "680afe04-ddc8-4e14-9260-509dded09a0b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1305.664400] env[62627]: DEBUG oslo_concurrency.lockutils [req-9892c3e6-c74e-4049-9359-7f90cc593d95 req-3a38950d-5c26-4c5f-8ca6-e7906ce9b921 service nova] Lock "680afe04-ddc8-4e14-9260-509dded09a0b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1305.664625] env[62627]: DEBUG oslo_concurrency.lockutils [req-9892c3e6-c74e-4049-9359-7f90cc593d95 req-3a38950d-5c26-4c5f-8ca6-e7906ce9b921 service nova] Lock "680afe04-ddc8-4e14-9260-509dded09a0b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1305.664835] env[62627]: DEBUG nova.compute.manager [req-9892c3e6-c74e-4049-9359-7f90cc593d95 req-3a38950d-5c26-4c5f-8ca6-e7906ce9b921 service nova] [instance: 680afe04-ddc8-4e14-9260-509dded09a0b] No waiting events found dispatching network-vif-plugged-00c086aa-5c1a-4b1e-8f68-aea4434492ea {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1305.665020] env[62627]: WARNING nova.compute.manager [req-9892c3e6-c74e-4049-9359-7f90cc593d95 req-3a38950d-5c26-4c5f-8ca6-e7906ce9b921 service nova] [instance: 680afe04-ddc8-4e14-9260-509dded09a0b] Received unexpected event network-vif-plugged-00c086aa-5c1a-4b1e-8f68-aea4434492ea for instance with vm_state building and task_state spawning. 
[ 1305.665200] env[62627]: DEBUG nova.compute.manager [req-9892c3e6-c74e-4049-9359-7f90cc593d95 req-3a38950d-5c26-4c5f-8ca6-e7906ce9b921 service nova] [instance: 680afe04-ddc8-4e14-9260-509dded09a0b] Received event network-changed-00c086aa-5c1a-4b1e-8f68-aea4434492ea {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1305.665354] env[62627]: DEBUG nova.compute.manager [req-9892c3e6-c74e-4049-9359-7f90cc593d95 req-3a38950d-5c26-4c5f-8ca6-e7906ce9b921 service nova] [instance: 680afe04-ddc8-4e14-9260-509dded09a0b] Refreshing instance network info cache due to event network-changed-00c086aa-5c1a-4b1e-8f68-aea4434492ea. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1305.665569] env[62627]: DEBUG oslo_concurrency.lockutils [req-9892c3e6-c74e-4049-9359-7f90cc593d95 req-3a38950d-5c26-4c5f-8ca6-e7906ce9b921 service nova] Acquiring lock "refresh_cache-680afe04-ddc8-4e14-9260-509dded09a0b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1305.665721] env[62627]: DEBUG oslo_concurrency.lockutils [req-9892c3e6-c74e-4049-9359-7f90cc593d95 req-3a38950d-5c26-4c5f-8ca6-e7906ce9b921 service nova] Acquired lock "refresh_cache-680afe04-ddc8-4e14-9260-509dded09a0b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1305.665883] env[62627]: DEBUG nova.network.neutron [req-9892c3e6-c74e-4049-9359-7f90cc593d95 req-3a38950d-5c26-4c5f-8ca6-e7906ce9b921 service nova] [instance: 680afe04-ddc8-4e14-9260-509dded09a0b] Refreshing network info cache for port 00c086aa-5c1a-4b1e-8f68-aea4434492ea {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1305.930654] env[62627]: DEBUG nova.network.neutron [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Port 57af03b0-8d4d-4f61-a384-800617af9be4 binding to destination host cpu-1 is already ACTIVE {{(pid=62627) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1306.714187] env[62627]: DEBUG nova.scheduler.client.report [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1306.724718] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195370, 'name': CreateVM_Task, 'duration_secs': 0.293771} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1306.725174] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 680afe04-ddc8-4e14-9260-509dded09a0b] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1306.725911] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ddb8842-cfae-4ae9-a153-5f70b7060f1e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.728996] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1306.729176] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1306.731384] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1306.731384] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-17dd6751-1562-4709-afd7-251666ca85be {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.749897] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Updating instance 'e9d33611-e373-4303-937d-b43d893e51f5' progress to 0 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1306.757022] env[62627]: DEBUG oslo_vmware.api [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1306.757022] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52367a08-e57d-e43a-a421-a8a357ad97ee" [ 1306.757022] env[62627]: _type = "Task" [ 1306.757022] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1306.763428] env[62627]: DEBUG oslo_vmware.api [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52367a08-e57d-e43a-a421-a8a357ad97ee, 'name': SearchDatastore_Task, 'duration_secs': 0.010599} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1306.765853] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1306.766094] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 680afe04-ddc8-4e14-9260-509dded09a0b] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1306.766317] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1306.766460] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1306.766635] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1306.766878] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9017b7af-0c80-4645-ac6f-b7d0cbdfe66f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.775953] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1306.776139] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1306.776840] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-730682f9-3cf6-4115-a1e3-e5e1cd3725ab {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.783394] env[62627]: DEBUG oslo_vmware.api [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1306.783394] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52dbc238-6b92-13ee-b7d6-2f534307407c" [ 1306.783394] env[62627]: _type = "Task" [ 1306.783394] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1306.791814] env[62627]: DEBUG oslo_vmware.api [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52dbc238-6b92-13ee-b7d6-2f534307407c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1306.937174] env[62627]: DEBUG nova.network.neutron [req-9892c3e6-c74e-4049-9359-7f90cc593d95 req-3a38950d-5c26-4c5f-8ca6-e7906ce9b921 service nova] [instance: 680afe04-ddc8-4e14-9260-509dded09a0b] Updated VIF entry in instance network info cache for port 00c086aa-5c1a-4b1e-8f68-aea4434492ea. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1306.937551] env[62627]: DEBUG nova.network.neutron [req-9892c3e6-c74e-4049-9359-7f90cc593d95 req-3a38950d-5c26-4c5f-8ca6-e7906ce9b921 service nova] [instance: 680afe04-ddc8-4e14-9260-509dded09a0b] Updating instance_info_cache with network_info: [{"id": "00c086aa-5c1a-4b1e-8f68-aea4434492ea", "address": "fa:16:3e:ee:72:06", "network": {"id": "93a120a3-5be9-4822-960a-7dc2d246a740", "bridge": "br-int", "label": "tempest-ServersTestJSON-1051356861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "710e7463994b4ec08bcc6d479cd1138b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49a94c29-ddd5-4383-9219-1c2c3bb09cc5", "external-id": "nsx-vlan-transportzone-2", "segmentation_id": 2, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap00c086aa-5c", "ovs_interfaceid": "00c086aa-5c1a-4b1e-8f68-aea4434492ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1307.226213] env[62627]: DEBUG oslo_concurrency.lockutils [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.908s {{(pid=62627) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1307.226440] env[62627]: INFO nova.compute.manager [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Migrating [ 1307.251643] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquiring lock "1593ca50-bc97-464a-86ca-17995db15758-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1307.251643] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lock "1593ca50-bc97-464a-86ca-17995db15758-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1307.251643] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lock "1593ca50-bc97-464a-86ca-17995db15758-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1307.255657] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1307.255986] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-de711efd-4b16-49b5-94e9-195df3158e88 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.264683] env[62627]: DEBUG oslo_vmware.api [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 1307.264683] env[62627]: value = "task-2195371" [ 1307.264683] env[62627]: _type = "Task" [ 1307.264683] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1307.273587] env[62627]: DEBUG oslo_vmware.api [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195371, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1307.295023] env[62627]: DEBUG oslo_vmware.api [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52dbc238-6b92-13ee-b7d6-2f534307407c, 'name': SearchDatastore_Task, 'duration_secs': 0.010454} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1307.295805] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-49755dc8-3fe7-442a-aa9b-038c76d9fc0c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.301579] env[62627]: DEBUG oslo_vmware.api [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1307.301579] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52fce4be-fc3c-37d1-0f47-fa1e943fbb91" [ 1307.301579] env[62627]: _type = "Task" [ 1307.301579] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1307.309917] env[62627]: DEBUG oslo_vmware.api [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52fce4be-fc3c-37d1-0f47-fa1e943fbb91, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1307.440711] env[62627]: DEBUG oslo_concurrency.lockutils [req-9892c3e6-c74e-4049-9359-7f90cc593d95 req-3a38950d-5c26-4c5f-8ca6-e7906ce9b921 service nova] Releasing lock "refresh_cache-680afe04-ddc8-4e14-9260-509dded09a0b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1307.751768] env[62627]: DEBUG oslo_concurrency.lockutils [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquiring lock "refresh_cache-02b9bb2d-5752-4c09-9661-5966c2ac1bad" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1307.752186] env[62627]: DEBUG oslo_concurrency.lockutils [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquired lock "refresh_cache-02b9bb2d-5752-4c09-9661-5966c2ac1bad" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1307.752186] env[62627]: DEBUG nova.network.neutron [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1307.774957] env[62627]: DEBUG oslo_vmware.api [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195371, 'name': PowerOffVM_Task, 'duration_secs': 0.226044} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1307.775240] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1307.775419] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Updating instance 'e9d33611-e373-4303-937d-b43d893e51f5' progress to 17 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1307.812980] env[62627]: DEBUG oslo_vmware.api [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52fce4be-fc3c-37d1-0f47-fa1e943fbb91, 'name': SearchDatastore_Task, 'duration_secs': 0.0099} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1307.813249] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1307.813495] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 680afe04-ddc8-4e14-9260-509dded09a0b/680afe04-ddc8-4e14-9260-509dded09a0b.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1307.813748] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4f342f26-de7d-4f4d-b47b-6e7f4a0c729a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.821347] env[62627]: DEBUG oslo_vmware.api [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1307.821347] env[62627]: value = "task-2195372" [ 1307.821347] env[62627]: _type = "Task" [ 1307.821347] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1307.830021] env[62627]: DEBUG oslo_vmware.api [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195372, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1308.282070] env[62627]: DEBUG nova.virt.hardware [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1308.282353] env[62627]: DEBUG nova.virt.hardware [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1308.282450] env[62627]: DEBUG nova.virt.hardware [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1308.282630] env[62627]: DEBUG nova.virt.hardware [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1308.282775] env[62627]: DEBUG nova.virt.hardware [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1308.282920] env[62627]: DEBUG nova.virt.hardware [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1308.283199] env[62627]: DEBUG nova.virt.hardware [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1308.283455] env[62627]: DEBUG nova.virt.hardware [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1308.283685] env[62627]: DEBUG nova.virt.hardware [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Got 1 possible 
topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1308.283865] env[62627]: DEBUG nova.virt.hardware [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1308.284057] env[62627]: DEBUG nova.virt.hardware [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1308.289883] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3a514599-53d6-4455-85e6-acc52bf23291 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.307379] env[62627]: DEBUG oslo_vmware.api [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 1308.307379] env[62627]: value = "task-2195373" [ 1308.307379] env[62627]: _type = "Task" [ 1308.307379] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1308.316148] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquiring lock "refresh_cache-1593ca50-bc97-464a-86ca-17995db15758" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1308.316345] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquired lock "refresh_cache-1593ca50-bc97-464a-86ca-17995db15758" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1308.316519] env[62627]: DEBUG nova.network.neutron [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1308.322417] env[62627]: DEBUG oslo_vmware.api [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195373, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1308.331932] env[62627]: DEBUG oslo_vmware.api [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195372, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.469347} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1308.332163] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 680afe04-ddc8-4e14-9260-509dded09a0b/680afe04-ddc8-4e14-9260-509dded09a0b.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1308.332373] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 680afe04-ddc8-4e14-9260-509dded09a0b] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1308.332653] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5e737d3e-7bfe-4cc2-b494-818c90eb59a5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.340137] env[62627]: DEBUG oslo_vmware.api [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1308.340137] env[62627]: value = "task-2195374" [ 1308.340137] env[62627]: _type = "Task" [ 1308.340137] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1308.349139] env[62627]: DEBUG oslo_vmware.api [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195374, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1308.596274] env[62627]: DEBUG nova.network.neutron [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Updating instance_info_cache with network_info: [{"id": "213d4e79-8676-47f0-9e73-c3a36df750d1", "address": "fa:16:3e:38:51:83", "network": {"id": "1e37275e-31eb-4a79-8ff1-1b105d16fe3d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-59116622-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.147", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7a97f57bdf4a46db80460b1d5c9f9b9f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f499bc9-78da-46c1-9274-19edf26d31cb", "external-id": "nsx-vlan-transportzone-243", "segmentation_id": 243, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap213d4e79-86", "ovs_interfaceid": "213d4e79-8676-47f0-9e73-c3a36df750d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1308.820047] env[62627]: DEBUG oslo_vmware.api [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195373, 'name': ReconfigVM_Task, 'duration_secs': 0.392243} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1308.820432] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Updating instance 'e9d33611-e373-4303-937d-b43d893e51f5' progress to 33 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1308.850016] env[62627]: DEBUG oslo_vmware.api [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195374, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076297} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1308.850348] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 680afe04-ddc8-4e14-9260-509dded09a0b] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1308.851197] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86548a57-be1d-4c1f-8775-40e4f5887255 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.877273] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 680afe04-ddc8-4e14-9260-509dded09a0b] Reconfiguring VM instance instance-0000006e to attach disk [datastore2] 680afe04-ddc8-4e14-9260-509dded09a0b/680afe04-ddc8-4e14-9260-509dded09a0b.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1308.877613] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a23c6681-4d47-439c-8d09-6b601aa2798f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.901073] env[62627]: DEBUG oslo_vmware.api [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1308.901073] env[62627]: value = "task-2195375" [ 1308.901073] env[62627]: _type = "Task" [ 1308.901073] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1308.909344] env[62627]: DEBUG oslo_vmware.api [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195375, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1309.046513] env[62627]: DEBUG nova.network.neutron [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Updating instance_info_cache with network_info: [{"id": "57af03b0-8d4d-4f61-a384-800617af9be4", "address": "fa:16:3e:02:a2:5f", "network": {"id": "e5d8177b-3afb-46cb-88d9-d4777ffa4a39", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1184692155-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.160", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f3a36c557145e38c528a628b1f3b67", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a44713-0af1-486e-bc0d-00e03a769fa4", "external-id": "nsx-vlan-transportzone-420", "segmentation_id": 420, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57af03b0-8d", "ovs_interfaceid": "57af03b0-8d4d-4f61-a384-800617af9be4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1309.099662] env[62627]: DEBUG oslo_concurrency.lockutils [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Releasing lock "refresh_cache-02b9bb2d-5752-4c09-9661-5966c2ac1bad" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1309.328183] env[62627]: DEBUG nova.virt.hardware [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1309.328445] env[62627]: DEBUG nova.virt.hardware [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1309.328603] env[62627]: DEBUG nova.virt.hardware [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] 
Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1309.328824] env[62627]: DEBUG nova.virt.hardware [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1309.328984] env[62627]: DEBUG nova.virt.hardware [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1309.329147] env[62627]: DEBUG nova.virt.hardware [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1309.329345] env[62627]: DEBUG nova.virt.hardware [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1309.329500] env[62627]: DEBUG nova.virt.hardware [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1309.329661] env[62627]: DEBUG nova.virt.hardware [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1309.329914] env[62627]: DEBUG nova.virt.hardware [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1309.330221] env[62627]: DEBUG nova.virt.hardware [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1309.335655] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Reconfiguring VM instance instance-00000041 to detach disk 2000 {{(pid=62627) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1309.335959] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c8b965b7-3368-4a28-b621-76252ee4aa94 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.354375] env[62627]: DEBUG 
oslo_vmware.api [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 1309.354375] env[62627]: value = "task-2195376" [ 1309.354375] env[62627]: _type = "Task" [ 1309.354375] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1309.362241] env[62627]: DEBUG oslo_vmware.api [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195376, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1309.412026] env[62627]: DEBUG oslo_vmware.api [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195375, 'name': ReconfigVM_Task, 'duration_secs': 0.341985} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1309.412026] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 680afe04-ddc8-4e14-9260-509dded09a0b] Reconfigured VM instance instance-0000006e to attach disk [datastore2] 680afe04-ddc8-4e14-9260-509dded09a0b/680afe04-ddc8-4e14-9260-509dded09a0b.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1309.413165] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e2d635f0-102e-41be-85cc-00284f72f738 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.419315] env[62627]: DEBUG oslo_vmware.api [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1309.419315] env[62627]: value = "task-2195377" [ 1309.419315] env[62627]: _type = "Task" [ 1309.419315] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1309.427286] env[62627]: DEBUG oslo_vmware.api [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195377, 'name': Rename_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1309.549649] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Releasing lock "refresh_cache-1593ca50-bc97-464a-86ca-17995db15758" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1309.864819] env[62627]: DEBUG oslo_vmware.api [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195376, 'name': ReconfigVM_Task, 'duration_secs': 0.212532} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1309.865167] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Reconfigured VM instance instance-00000041 to detach disk 2000 {{(pid=62627) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1309.865804] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa5e3da3-32bc-4bdf-9620-e6ceb8c68211 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.887787] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Reconfiguring VM instance instance-00000041 to attach disk [datastore1] e9d33611-e373-4303-937d-b43d893e51f5/e9d33611-e373-4303-937d-b43d893e51f5.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1309.888045] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-30562aad-93e0-4d1f-85bb-1f58bea453f3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.906210] env[62627]: DEBUG oslo_vmware.api [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 1309.906210] env[62627]: value = "task-2195378" [ 1309.906210] env[62627]: _type = "Task" [ 1309.906210] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1309.914182] env[62627]: DEBUG oslo_vmware.api [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195378, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1309.927968] env[62627]: DEBUG oslo_vmware.api [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195377, 'name': Rename_Task, 'duration_secs': 0.143064} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1309.928254] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 680afe04-ddc8-4e14-9260-509dded09a0b] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1309.928490] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-275f1a67-689d-4473-b0d3-9b869226170f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.936719] env[62627]: DEBUG oslo_vmware.api [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1309.936719] env[62627]: value = "task-2195379" [ 1309.936719] env[62627]: _type = "Task" [ 1309.936719] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1309.944609] env[62627]: DEBUG oslo_vmware.api [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195379, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1310.059663] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7685c5db-9e33-4fff-93c2-4a7c1b6d08b3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.066990] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c60e1b5-cfc1-4022-a873-22db1ae16892 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.415868] env[62627]: DEBUG oslo_vmware.api [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195378, 'name': ReconfigVM_Task, 'duration_secs': 0.274263} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1310.416178] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Reconfigured VM instance instance-00000041 to attach disk [datastore1] e9d33611-e373-4303-937d-b43d893e51f5/e9d33611-e373-4303-937d-b43d893e51f5.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1310.416454] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Updating instance 'e9d33611-e373-4303-937d-b43d893e51f5' progress to 50 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1310.446170] env[62627]: DEBUG oslo_vmware.api [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195379, 'name': PowerOnVM_Task, 'duration_secs': 0.448112} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1310.446433] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 680afe04-ddc8-4e14-9260-509dded09a0b] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1310.446662] env[62627]: INFO nova.compute.manager [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 680afe04-ddc8-4e14-9260-509dded09a0b] Took 7.74 seconds to spawn the instance on the hypervisor. 
[ 1310.446839] env[62627]: DEBUG nova.compute.manager [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 680afe04-ddc8-4e14-9260-509dded09a0b] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1310.447603] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ec84bab-c12d-438f-bd23-c0ebda8848d4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.614101] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1aba0926-ea20-48ef-a8ed-2b37bbae0a9c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.634091] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Updating instance '02b9bb2d-5752-4c09-9661-5966c2ac1bad' progress to 0 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1310.923451] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40d82f1a-ffca-4466-99f1-6f65cad62283 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.942251] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42af2feb-4e52-4114-bf4a-7e5f01164fac {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.962371] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Updating instance 'e9d33611-e373-4303-937d-b43d893e51f5' progress to 67 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1310.969698] env[62627]: INFO nova.compute.manager [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 680afe04-ddc8-4e14-9260-509dded09a0b] Took 15.39 seconds to build instance. [ 1311.140368] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1311.140539] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2432a96d-91cf-4bd4-b584-7e0ca5fe9de4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.147976] env[62627]: DEBUG oslo_vmware.api [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1311.147976] env[62627]: value = "task-2195380" [ 1311.147976] env[62627]: _type = "Task" [ 1311.147976] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1311.156307] env[62627]: DEBUG oslo_vmware.api [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195380, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.203108] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28bd16b6-d874-47c6-9bad-e6a3bf5eca1b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.226923] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-950a9b9c-ef84-4a81-8300-59ca17419de8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.234579] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Updating instance '1593ca50-bc97-464a-86ca-17995db15758' progress to 83 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1311.471288] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9488fd18-7d5b-438e-b924-5b9e0b7fdb1a tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "680afe04-ddc8-4e14-9260-509dded09a0b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.896s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1311.548313] env[62627]: DEBUG nova.network.neutron [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Port 5825a537-f659-4e7e-907e-e22208e06c0c binding to destination host cpu-1 is already ACTIVE {{(pid=62627) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1311.659039] env[62627]: DEBUG oslo_vmware.api [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195380, 'name': PowerOffVM_Task, 'duration_secs': 0.157192} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1311.659328] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1311.659515] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Updating instance '02b9bb2d-5752-4c09-9661-5966c2ac1bad' progress to 17 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1311.740777] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1311.741433] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1fdc790e-8f14-4ea3-8c8e-bba8433e20e9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.748422] env[62627]: DEBUG oslo_vmware.api [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1311.748422] env[62627]: value = "task-2195381" [ 1311.748422] env[62627]: _type = "Task" [ 1311.748422] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1311.758380] env[62627]: DEBUG oslo_vmware.api [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195381, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1312.167196] env[62627]: DEBUG nova.virt.hardware [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1312.167574] env[62627]: DEBUG nova.virt.hardware [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1312.167681] env[62627]: DEBUG nova.virt.hardware [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1312.167851] env[62627]: DEBUG nova.virt.hardware [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1312.168014] env[62627]: DEBUG nova.virt.hardware [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1312.168170] env[62627]: DEBUG nova.virt.hardware [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1312.168378] env[62627]: DEBUG nova.virt.hardware [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1312.168539] env[62627]: DEBUG nova.virt.hardware [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1312.168704] env[62627]: DEBUG nova.virt.hardware [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Got 1 possible topologies 
{{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1312.168943] env[62627]: DEBUG nova.virt.hardware [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1312.169153] env[62627]: DEBUG nova.virt.hardware [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1312.174319] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7a3c96cc-1bea-4bfd-be3d-72f0784f70ca {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.194831] env[62627]: DEBUG oslo_vmware.api [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1312.194831] env[62627]: value = "task-2195382" [ 1312.194831] env[62627]: _type = "Task" [ 1312.194831] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1312.205910] env[62627]: DEBUG oslo_vmware.api [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195382, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1312.258297] env[62627]: DEBUG oslo_vmware.api [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195381, 'name': PowerOnVM_Task, 'duration_secs': 0.42344} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1312.258535] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1312.258747] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-2ca83e66-27dc-4c69-9df1-79cd7d897970 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Updating instance '1593ca50-bc97-464a-86ca-17995db15758' progress to 100 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1312.570739] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquiring lock "e9d33611-e373-4303-937d-b43d893e51f5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1312.570739] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lock "e9d33611-e373-4303-937d-b43d893e51f5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1312.570952] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lock "e9d33611-e373-4303-937d-b43d893e51f5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1312.628977] env[62627]: DEBUG oslo_concurrency.lockutils [None req-149bf9ae-d7bf-41aa-8fc1-7ea514b3c7dc tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquiring lock "680afe04-ddc8-4e14-9260-509dded09a0b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1312.629269] env[62627]: DEBUG oslo_concurrency.lockutils [None req-149bf9ae-d7bf-41aa-8fc1-7ea514b3c7dc tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "680afe04-ddc8-4e14-9260-509dded09a0b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1312.629479] env[62627]: DEBUG oslo_concurrency.lockutils [None req-149bf9ae-d7bf-41aa-8fc1-7ea514b3c7dc tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquiring lock "680afe04-ddc8-4e14-9260-509dded09a0b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1312.629664] env[62627]: DEBUG oslo_concurrency.lockutils [None req-149bf9ae-d7bf-41aa-8fc1-7ea514b3c7dc tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "680afe04-ddc8-4e14-9260-509dded09a0b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1312.629840] env[62627]: DEBUG oslo_concurrency.lockutils [None req-149bf9ae-d7bf-41aa-8fc1-7ea514b3c7dc tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "680afe04-ddc8-4e14-9260-509dded09a0b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1312.632046] env[62627]: INFO nova.compute.manager [None req-149bf9ae-d7bf-41aa-8fc1-7ea514b3c7dc tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 680afe04-ddc8-4e14-9260-509dded09a0b] Terminating instance [ 1312.705335] env[62627]: DEBUG oslo_vmware.api [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195382, 'name': ReconfigVM_Task, 'duration_secs': 0.149769} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1312.705642] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Updating instance '02b9bb2d-5752-4c09-9661-5966c2ac1bad' progress to 33 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1313.136073] env[62627]: DEBUG nova.compute.manager [None req-149bf9ae-d7bf-41aa-8fc1-7ea514b3c7dc tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 680afe04-ddc8-4e14-9260-509dded09a0b] Start destroying the instance on the hypervisor. 
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1313.136268] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-149bf9ae-d7bf-41aa-8fc1-7ea514b3c7dc tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 680afe04-ddc8-4e14-9260-509dded09a0b] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1313.137655] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64be244b-7d74-4fbd-a141-8a0b7bb49dc4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.146656] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-149bf9ae-d7bf-41aa-8fc1-7ea514b3c7dc tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 680afe04-ddc8-4e14-9260-509dded09a0b] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1313.146926] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6bce24b9-d991-42f4-9878-705f488d88e4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.155650] env[62627]: DEBUG oslo_vmware.api [None req-149bf9ae-d7bf-41aa-8fc1-7ea514b3c7dc tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1313.155650] env[62627]: value = "task-2195383" [ 1313.155650] env[62627]: _type = "Task" [ 1313.155650] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1313.164642] env[62627]: DEBUG oslo_vmware.api [None req-149bf9ae-d7bf-41aa-8fc1-7ea514b3c7dc tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195383, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.212738] env[62627]: DEBUG nova.virt.hardware [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1313.213185] env[62627]: DEBUG nova.virt.hardware [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1313.213374] env[62627]: DEBUG nova.virt.hardware [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1313.213575] env[62627]: DEBUG nova.virt.hardware [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1313.213765] env[62627]: DEBUG nova.virt.hardware [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1313.213968] env[62627]: DEBUG nova.virt.hardware [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1313.214256] env[62627]: DEBUG nova.virt.hardware [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1313.214462] env[62627]: DEBUG nova.virt.hardware [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1313.214674] env[62627]: DEBUG nova.virt.hardware [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Got 1 possible topologies 
{{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1313.214859] env[62627]: DEBUG nova.virt.hardware [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1313.215051] env[62627]: DEBUG nova.virt.hardware [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1313.220787] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Reconfiguring VM instance instance-0000006d to detach disk 2000 {{(pid=62627) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1313.221495] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e63561fe-eaf4-4dcc-b691-072699164cea {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.240530] env[62627]: DEBUG oslo_vmware.api [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1313.240530] env[62627]: value = "task-2195384" [ 1313.240530] env[62627]: _type = "Task" [ 1313.240530] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1313.248550] env[62627]: DEBUG oslo_vmware.api [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195384, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.632495] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquiring lock "refresh_cache-e9d33611-e373-4303-937d-b43d893e51f5" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1313.632687] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquired lock "refresh_cache-e9d33611-e373-4303-937d-b43d893e51f5" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1313.632865] env[62627]: DEBUG nova.network.neutron [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1313.664602] env[62627]: DEBUG oslo_vmware.api [None req-149bf9ae-d7bf-41aa-8fc1-7ea514b3c7dc tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195383, 'name': PowerOffVM_Task, 'duration_secs': 0.194999} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1313.664844] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-149bf9ae-d7bf-41aa-8fc1-7ea514b3c7dc tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 680afe04-ddc8-4e14-9260-509dded09a0b] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1313.665017] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-149bf9ae-d7bf-41aa-8fc1-7ea514b3c7dc tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 680afe04-ddc8-4e14-9260-509dded09a0b] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1313.665257] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3dca9209-53a9-4abf-baec-442256f836fd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.750720] env[62627]: DEBUG oslo_vmware.api [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195384, 'name': ReconfigVM_Task, 'duration_secs': 0.164383} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1313.751009] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Reconfigured VM instance instance-0000006d to detach disk 2000 {{(pid=62627) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1313.751901] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b4ba453-3ea9-4e0c-9195-17f47363450e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.755349] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-149bf9ae-d7bf-41aa-8fc1-7ea514b3c7dc tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 680afe04-ddc8-4e14-9260-509dded09a0b] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1313.755540] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-149bf9ae-d7bf-41aa-8fc1-7ea514b3c7dc tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 680afe04-ddc8-4e14-9260-509dded09a0b] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1313.755718] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-149bf9ae-d7bf-41aa-8fc1-7ea514b3c7dc tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Deleting the datastore file [datastore2] 680afe04-ddc8-4e14-9260-509dded09a0b {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1313.756301] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1fe48881-ba2c-4d2e-af0f-5733bcf695c5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.779331] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Reconfiguring VM instance instance-0000006d to attach disk [datastore2] volume-68c69bdc-55ac-48af-b285-e6d2d8dd861d/volume-68c69bdc-55ac-48af-b285-e6d2d8dd861d.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1313.780598] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bdae0bf6-88bd-4df2-ac92-943f9aac80ba {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.793225] env[62627]: DEBUG oslo_vmware.api [None req-149bf9ae-d7bf-41aa-8fc1-7ea514b3c7dc tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1313.793225] env[62627]: value = "task-2195386" [ 1313.793225] env[62627]: _type = "Task" [ 1313.793225] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1313.797844] env[62627]: DEBUG oslo_vmware.api [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1313.797844] env[62627]: value = "task-2195387" [ 1313.797844] env[62627]: _type = "Task" [ 1313.797844] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1313.803259] env[62627]: DEBUG oslo_vmware.api [None req-149bf9ae-d7bf-41aa-8fc1-7ea514b3c7dc tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195386, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.807725] env[62627]: DEBUG oslo_vmware.api [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195387, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.304244] env[62627]: DEBUG oslo_vmware.api [None req-149bf9ae-d7bf-41aa-8fc1-7ea514b3c7dc tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195386, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.158842} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1314.306780] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-149bf9ae-d7bf-41aa-8fc1-7ea514b3c7dc tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1314.306982] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-149bf9ae-d7bf-41aa-8fc1-7ea514b3c7dc tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 680afe04-ddc8-4e14-9260-509dded09a0b] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1314.307178] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-149bf9ae-d7bf-41aa-8fc1-7ea514b3c7dc tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 680afe04-ddc8-4e14-9260-509dded09a0b] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1314.307352] env[62627]: INFO nova.compute.manager [None req-149bf9ae-d7bf-41aa-8fc1-7ea514b3c7dc tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 680afe04-ddc8-4e14-9260-509dded09a0b] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1314.307585] env[62627]: DEBUG oslo.service.loopingcall [None req-149bf9ae-d7bf-41aa-8fc1-7ea514b3c7dc tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1314.307796] env[62627]: DEBUG nova.compute.manager [-] [instance: 680afe04-ddc8-4e14-9260-509dded09a0b] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1314.307915] env[62627]: DEBUG nova.network.neutron [-] [instance: 680afe04-ddc8-4e14-9260-509dded09a0b] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1314.312476] env[62627]: DEBUG oslo_vmware.api [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195387, 'name': ReconfigVM_Task, 'duration_secs': 0.288499} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1314.313227] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Reconfigured VM instance instance-0000006d to attach disk [datastore2] volume-68c69bdc-55ac-48af-b285-e6d2d8dd861d/volume-68c69bdc-55ac-48af-b285-e6d2d8dd861d.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1314.313357] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Updating instance '02b9bb2d-5752-4c09-9661-5966c2ac1bad' progress to 50 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1314.452976] env[62627]: DEBUG nova.network.neutron [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Updating instance_info_cache with network_info: [{"id": "5825a537-f659-4e7e-907e-e22208e06c0c", "address": "fa:16:3e:a9:0b:9d", "network": {"id": "da26ed97-a6a7-47f1-b6c0-d1b2f45ad202", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-265963183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.167", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97c11d791dcf4f94a8e8bda7a6f014cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6eaa481-1f92-4851-b98e-09ed0daad7cb", "external-id": "nsx-vlan-transportzone-636", "segmentation_id": 636, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5825a537-f6", "ovs_interfaceid": "5825a537-f659-4e7e-907e-e22208e06c0c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 
1314.688136] env[62627]: DEBUG nova.compute.manager [req-2cb71fc3-b23d-4928-b674-a850607ba431 req-db4c27c1-cb72-42af-8482-6bb45aa028bd service nova] [instance: 680afe04-ddc8-4e14-9260-509dded09a0b] Received event network-vif-deleted-00c086aa-5c1a-4b1e-8f68-aea4434492ea {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1314.688363] env[62627]: INFO nova.compute.manager [req-2cb71fc3-b23d-4928-b674-a850607ba431 req-db4c27c1-cb72-42af-8482-6bb45aa028bd service nova] [instance: 680afe04-ddc8-4e14-9260-509dded09a0b] Neutron deleted interface 00c086aa-5c1a-4b1e-8f68-aea4434492ea; detaching it from the instance and deleting it from the info cache [ 1314.688541] env[62627]: DEBUG nova.network.neutron [req-2cb71fc3-b23d-4928-b674-a850607ba431 req-db4c27c1-cb72-42af-8482-6bb45aa028bd service nova] [instance: 680afe04-ddc8-4e14-9260-509dded09a0b] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1314.819921] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd73e960-e078-4b15-a67c-bb3a85618506 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.838936] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0758512-c12b-4fc8-8599-54d24079a245 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.859574] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Updating instance '02b9bb2d-5752-4c09-9661-5966c2ac1bad' progress to 67 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1314.955390] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Releasing lock "refresh_cache-e9d33611-e373-4303-937d-b43d893e51f5" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1314.990796] env[62627]: DEBUG nova.network.neutron [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Port 57af03b0-8d4d-4f61-a384-800617af9be4 binding to destination host cpu-1 is already ACTIVE {{(pid=62627) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1314.991069] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquiring lock "refresh_cache-1593ca50-bc97-464a-86ca-17995db15758" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1314.991249] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquired lock "refresh_cache-1593ca50-bc97-464a-86ca-17995db15758" {{(pid=62627) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1314.991418] env[62627]: DEBUG nova.network.neutron [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1315.149224] env[62627]: DEBUG nova.network.neutron [-] [instance: 680afe04-ddc8-4e14-9260-509dded09a0b] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1315.191448] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-60347591-1790-4b6e-8b5e-508ceb1ef592 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.201868] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-873cd000-62f9-42a7-97e9-824ba842e90d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.233532] env[62627]: DEBUG nova.compute.manager [req-2cb71fc3-b23d-4928-b674-a850607ba431 req-db4c27c1-cb72-42af-8482-6bb45aa028bd service nova] [instance: 680afe04-ddc8-4e14-9260-509dded09a0b] Detach interface failed, port_id=00c086aa-5c1a-4b1e-8f68-aea4434492ea, reason: Instance 680afe04-ddc8-4e14-9260-509dded09a0b could not be found. {{(pid=62627) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1315.498119] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63d35d43-e8aa-4048-8a44-c666ecd86a13 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.520735] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-832e7be2-3b61-4419-b825-28754507cee9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.526510] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Updating instance 'e9d33611-e373-4303-937d-b43d893e51f5' progress to 83 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1315.655221] env[62627]: INFO nova.compute.manager [-] [instance: 680afe04-ddc8-4e14-9260-509dded09a0b] Took 1.35 seconds to deallocate network for instance. 
[ 1315.792999] env[62627]: DEBUG nova.network.neutron [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Updating instance_info_cache with network_info: [{"id": "57af03b0-8d4d-4f61-a384-800617af9be4", "address": "fa:16:3e:02:a2:5f", "network": {"id": "e5d8177b-3afb-46cb-88d9-d4777ffa4a39", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1184692155-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.160", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f3a36c557145e38c528a628b1f3b67", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a44713-0af1-486e-bc0d-00e03a769fa4", "external-id": "nsx-vlan-transportzone-420", "segmentation_id": 420, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57af03b0-8d", "ovs_interfaceid": "57af03b0-8d4d-4f61-a384-800617af9be4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1316.033999] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1316.033999] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fbe16bfc-12a9-4fad-85eb-66d198fc3446 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.041430] env[62627]: DEBUG oslo_vmware.api [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 1316.041430] env[62627]: value = "task-2195388" [ 1316.041430] env[62627]: _type = "Task" [ 1316.041430] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1316.049117] env[62627]: DEBUG oslo_vmware.api [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195388, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.163473] env[62627]: DEBUG oslo_concurrency.lockutils [None req-149bf9ae-d7bf-41aa-8fc1-7ea514b3c7dc tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1316.163744] env[62627]: DEBUG oslo_concurrency.lockutils [None req-149bf9ae-d7bf-41aa-8fc1-7ea514b3c7dc tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1316.163967] env[62627]: DEBUG nova.objects.instance [None req-149bf9ae-d7bf-41aa-8fc1-7ea514b3c7dc tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lazy-loading 'resources' on Instance uuid 680afe04-ddc8-4e14-9260-509dded09a0b {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1316.295845] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Releasing lock "refresh_cache-1593ca50-bc97-464a-86ca-17995db15758" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1316.518051] env[62627]: DEBUG nova.network.neutron [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Port 213d4e79-8676-47f0-9e73-c3a36df750d1 binding to destination host cpu-1 is already ACTIVE {{(pid=62627) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1316.551568] env[62627]: DEBUG oslo_vmware.api [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195388, 'name': PowerOnVM_Task, 'duration_secs': 0.365452} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1316.551822] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1316.552013] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c46a7684-c36b-457f-936f-a83939b7afa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Updating instance 'e9d33611-e373-4303-937d-b43d893e51f5' progress to 100 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1316.794858] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1d8661c-6b87-45f5-981b-f44fd6298e82 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.799840] env[62627]: DEBUG nova.compute.manager [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=62627) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 1316.803804] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59462ed3-6a89-4f67-8fdf-f40265a57448 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.835143] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78ec9a17-f81f-44ae-ac69-fc0763fd14e5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.842136] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2efae540-f515-44a9-86f3-218104f621af {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.857861] env[62627]: DEBUG nova.compute.provider_tree [None req-149bf9ae-d7bf-41aa-8fc1-7ea514b3c7dc tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1317.363166] env[62627]: DEBUG nova.scheduler.client.report [None req-149bf9ae-d7bf-41aa-8fc1-7ea514b3c7dc tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1317.543413] env[62627]: DEBUG 
oslo_concurrency.lockutils [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquiring lock "02b9bb2d-5752-4c09-9661-5966c2ac1bad-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1317.543762] env[62627]: DEBUG oslo_concurrency.lockutils [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "02b9bb2d-5752-4c09-9661-5966c2ac1bad-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1317.543950] env[62627]: DEBUG oslo_concurrency.lockutils [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "02b9bb2d-5752-4c09-9661-5966c2ac1bad-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1317.868108] env[62627]: DEBUG oslo_concurrency.lockutils [None req-149bf9ae-d7bf-41aa-8fc1-7ea514b3c7dc tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.704s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1317.894699] env[62627]: INFO nova.scheduler.client.report [None req-149bf9ae-d7bf-41aa-8fc1-7ea514b3c7dc tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Deleted allocations for instance 680afe04-ddc8-4e14-9260-509dded09a0b [ 1317.905372] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1317.905469] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1318.402056] env[62627]: DEBUG oslo_concurrency.lockutils [None req-149bf9ae-d7bf-41aa-8fc1-7ea514b3c7dc tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "680afe04-ddc8-4e14-9260-509dded09a0b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.773s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1318.407608] env[62627]: DEBUG nova.objects.instance [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 
tempest-ServerActionsTestOtherB-764568701-project-member] Lazy-loading 'migration_context' on Instance uuid 1593ca50-bc97-464a-86ca-17995db15758 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1318.579874] env[62627]: DEBUG oslo_concurrency.lockutils [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquiring lock "refresh_cache-02b9bb2d-5752-4c09-9661-5966c2ac1bad" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1318.580230] env[62627]: DEBUG oslo_concurrency.lockutils [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquired lock "refresh_cache-02b9bb2d-5752-4c09-9661-5966c2ac1bad" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1318.580458] env[62627]: DEBUG nova.network.neutron [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1319.049742] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10435756-52a1-41f8-8632-fec790ab446a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.058384] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb0faef8-74fd-4533-b441-e4b1348000f8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.093235] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-545d3d12-46b6-4756-97db-dcb5e0fa842f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.100516] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e0470d39-f560-44c4-82c4-ff37e75850a6 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquiring lock "e9d33611-e373-4303-937d-b43d893e51f5" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1319.100759] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e0470d39-f560-44c4-82c4-ff37e75850a6 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lock "e9d33611-e373-4303-937d-b43d893e51f5" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1319.100936] env[62627]: DEBUG nova.compute.manager [None req-e0470d39-f560-44c4-82c4-ff37e75850a6 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Going to confirm migration 8 {{(pid=62627) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1319.103939] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-002ee7bc-7377-4f03-b0f1-011619998caf {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.120042] env[62627]: DEBUG nova.compute.provider_tree [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1319.367921] env[62627]: DEBUG nova.network.neutron [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Updating instance_info_cache with network_info: [{"id": "213d4e79-8676-47f0-9e73-c3a36df750d1", "address": "fa:16:3e:38:51:83", "network": {"id": "1e37275e-31eb-4a79-8ff1-1b105d16fe3d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-59116622-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.147", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7a97f57bdf4a46db80460b1d5c9f9b9f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f499bc9-78da-46c1-9274-19edf26d31cb", "external-id": "nsx-vlan-transportzone-243", "segmentation_id": 243, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap213d4e79-86", "ovs_interfaceid": "213d4e79-8676-47f0-9e73-c3a36df750d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1319.625056] env[62627]: DEBUG nova.scheduler.client.report [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1319.645525] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e0470d39-f560-44c4-82c4-ff37e75850a6 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquiring lock "refresh_cache-e9d33611-e373-4303-937d-b43d893e51f5" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1319.645713] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e0470d39-f560-44c4-82c4-ff37e75850a6 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquired lock 
"refresh_cache-e9d33611-e373-4303-937d-b43d893e51f5" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1319.645893] env[62627]: DEBUG nova.network.neutron [None req-e0470d39-f560-44c4-82c4-ff37e75850a6 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1319.646117] env[62627]: DEBUG nova.objects.instance [None req-e0470d39-f560-44c4-82c4-ff37e75850a6 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lazy-loading 'info_cache' on Instance uuid e9d33611-e373-4303-937d-b43d893e51f5 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1319.822031] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquiring lock "66f280de-f963-4f68-818b-ab0534be4c55" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1319.822142] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "66f280de-f963-4f68-818b-ab0534be4c55" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1319.833590] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f6ca66e6-8a33-4397-a13e-d47cfe8c05a3 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Acquiring lock "cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1319.833827] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f6ca66e6-8a33-4397-a13e-d47cfe8c05a3 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lock "cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1319.873023] env[62627]: DEBUG oslo_concurrency.lockutils [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Releasing lock "refresh_cache-02b9bb2d-5752-4c09-9661-5966c2ac1bad" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1320.324931] env[62627]: DEBUG nova.compute.manager [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 66f280de-f963-4f68-818b-ab0534be4c55] Starting instance... 
{{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1320.337301] env[62627]: DEBUG nova.compute.utils [None req-f6ca66e6-8a33-4397-a13e-d47cfe8c05a3 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1320.381064] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6ac80e0-6528-496d-b7f4-2e5ed8e93e1d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.388402] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd296160-bc69-4a52-ab80-c99e871ea8b5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.634751] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.729s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1320.841055] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f6ca66e6-8a33-4397-a13e-d47cfe8c05a3 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lock "cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1320.855261] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1320.855552] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1320.857306] env[62627]: INFO nova.compute.claims [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 66f280de-f963-4f68-818b-ab0534be4c55] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1320.893128] env[62627]: DEBUG nova.network.neutron [None req-e0470d39-f560-44c4-82c4-ff37e75850a6 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Updating instance_info_cache with network_info: [{"id": "5825a537-f659-4e7e-907e-e22208e06c0c", "address": "fa:16:3e:a9:0b:9d", "network": {"id": "da26ed97-a6a7-47f1-b6c0-d1b2f45ad202", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-265963183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": 
{"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.167", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97c11d791dcf4f94a8e8bda7a6f014cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6eaa481-1f92-4851-b98e-09ed0daad7cb", "external-id": "nsx-vlan-transportzone-636", "segmentation_id": 636, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5825a537-f6", "ovs_interfaceid": "5825a537-f659-4e7e-907e-e22208e06c0c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1321.396862] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e0470d39-f560-44c4-82c4-ff37e75850a6 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Releasing lock "refresh_cache-e9d33611-e373-4303-937d-b43d893e51f5" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1321.396862] env[62627]: DEBUG nova.objects.instance [None req-e0470d39-f560-44c4-82c4-ff37e75850a6 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lazy-loading 'migration_context' on Instance uuid e9d33611-e373-4303-937d-b43d893e51f5 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1321.491488] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8736b0dc-6204-48f5-9278-dc26cd9031e7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.511318] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75925dd1-8ec6-4b07-8a98-31f3b590b652 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.517809] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Updating instance '02b9bb2d-5752-4c09-9661-5966c2ac1bad' progress to 83 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1321.900311] env[62627]: DEBUG nova.objects.base [None req-e0470d39-f560-44c4-82c4-ff37e75850a6 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=62627) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1321.901805] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db6ca8bc-3e29-40e3-84c8-f69d74c62ec0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.922586] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b9f02b45-c52d-4ce6-974c-e57cfc9637e2 
{{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.925182] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f6ca66e6-8a33-4397-a13e-d47cfe8c05a3 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Acquiring lock "cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1321.925448] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f6ca66e6-8a33-4397-a13e-d47cfe8c05a3 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lock "cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1321.925774] env[62627]: INFO nova.compute.manager [None req-f6ca66e6-8a33-4397-a13e-d47cfe8c05a3 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Attaching volume b7a99f16-9526-46b7-ac02-baad9561109d to /dev/sdb [ 1321.931550] env[62627]: DEBUG oslo_vmware.api [None req-e0470d39-f560-44c4-82c4-ff37e75850a6 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 1321.931550] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52ee2ee4-279b-f487-c8af-71c1cf7aa43f" [ 1321.931550] env[62627]: _type = "Task" [ 1321.931550] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1321.944555] env[62627]: DEBUG oslo_vmware.api [None req-e0470d39-f560-44c4-82c4-ff37e75850a6 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52ee2ee4-279b-f487-c8af-71c1cf7aa43f, 'name': SearchDatastore_Task, 'duration_secs': 0.007999} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1321.944814] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e0470d39-f560-44c4-82c4-ff37e75850a6 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1321.962964] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46780577-b288-4669-921c-60cf1028682d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.970704] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f021c39d-2184-47e4-8258-2b8b4faf2f6f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.984288] env[62627]: DEBUG nova.virt.block_device [None req-f6ca66e6-8a33-4397-a13e-d47cfe8c05a3 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Updating existing volume attachment record: 1fb15dc2-8183-4f6c-9114-eeffc0845807 {{(pid=62627) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1322.013751] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea00c77f-acad-45d1-a9d3-0c9070b37e62 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.021199] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b0924e1-5d22-437f-803c-96fadc3a263b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.025794] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1322.025794] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2da197d9-614d-4d16-97be-fe59104e7349 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.055443] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81cbd86c-363e-4279-97dc-4b1f05a994e8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.058298] env[62627]: DEBUG oslo_vmware.api [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1322.058298] env[62627]: value = "task-2195389" [ 1322.058298] env[62627]: _type = "Task" [ 1322.058298] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1322.065037] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f64e58cf-dbc6-4b27-adb9-a0029828e32a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.071603] env[62627]: DEBUG oslo_vmware.api [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195389, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1322.080792] env[62627]: DEBUG nova.compute.provider_tree [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1322.167987] env[62627]: INFO nova.compute.manager [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Swapping old allocation on dict_keys(['c17e7f32-68d8-4e2f-8979-15d701de02f7']) held by migration 353cbf1f-0e8f-485e-beba-4bf4f09e0497 for instance [ 1322.192241] env[62627]: DEBUG nova.scheduler.client.report [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Overwriting current allocation {'allocations': {'c17e7f32-68d8-4e2f-8979-15d701de02f7': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 160}}, 'project_id': 'a9f3a36c557145e38c528a628b1f3b67', 'user_id': 'bc7ce5392ecd486b962c3db9d43181e1', 'consumer_generation': 1} on consumer 1593ca50-bc97-464a-86ca-17995db15758 {{(pid=62627) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 1322.276037] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquiring lock "refresh_cache-1593ca50-bc97-464a-86ca-17995db15758" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1322.276219] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquired lock "refresh_cache-1593ca50-bc97-464a-86ca-17995db15758" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1322.276400] env[62627]: DEBUG nova.network.neutron [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1322.569989] env[62627]: DEBUG oslo_vmware.api [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195389, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1322.584072] env[62627]: DEBUG nova.scheduler.client.report [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1322.993037] env[62627]: DEBUG nova.network.neutron [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Updating instance_info_cache with network_info: [{"id": "57af03b0-8d4d-4f61-a384-800617af9be4", "address": "fa:16:3e:02:a2:5f", "network": {"id": "e5d8177b-3afb-46cb-88d9-d4777ffa4a39", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1184692155-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.160", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f3a36c557145e38c528a628b1f3b67", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a44713-0af1-486e-bc0d-00e03a769fa4", "external-id": "nsx-vlan-transportzone-420", "segmentation_id": 420, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57af03b0-8d", "ovs_interfaceid": "57af03b0-8d4d-4f61-a384-800617af9be4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1323.068833] env[62627]: DEBUG oslo_vmware.api [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195389, 'name': PowerOnVM_Task, 'duration_secs': 0.574136} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1323.069182] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1323.069369] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-72c1f7ac-86cd-44c2-a274-800b50742cfe tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Updating instance '02b9bb2d-5752-4c09-9661-5966c2ac1bad' progress to 100 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1323.089458] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.234s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1323.089981] env[62627]: DEBUG nova.compute.manager [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 66f280de-f963-4f68-818b-ab0534be4c55] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1323.092747] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e0470d39-f560-44c4-82c4-ff37e75850a6 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 1.148s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1323.495775] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Releasing lock "refresh_cache-1593ca50-bc97-464a-86ca-17995db15758" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1323.496952] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6d8675e-4680-4ae0-a360-affc24992be9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.504390] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d79fc53a-d095-4905-89f0-4e6f99a83655 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.599056] env[62627]: DEBUG nova.compute.utils [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1323.601364] env[62627]: DEBUG nova.compute.manager [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 66f280de-f963-4f68-818b-ab0534be4c55] 
Allocating IP information in the background. {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1323.601600] env[62627]: DEBUG nova.network.neutron [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 66f280de-f963-4f68-818b-ab0534be4c55] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1323.654766] env[62627]: DEBUG nova.policy [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd847ab3ec20743d0997defea8008448e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '710e7463994b4ec08bcc6d479cd1138b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 1323.724496] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27caec5d-af4b-4f15-b9b7-9a515069cb53 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.731830] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ae03e5b-ade2-419f-836d-4a64ca46ad3e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.762419] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4976e9be-aef0-46c5-81f8-32de7b1f0532 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.769192] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66827aac-b79d-45c5-af9b-3be07ba98f5a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.781825] env[62627]: DEBUG nova.compute.provider_tree [None req-e0470d39-f560-44c4-82c4-ff37e75850a6 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1323.926541] env[62627]: DEBUG nova.network.neutron [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 66f280de-f963-4f68-818b-ab0534be4c55] Successfully created port: 0aca5f16-116e-4907-b0dc-1f1cd6bc60ef {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1324.104395] env[62627]: DEBUG nova.compute.manager [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 66f280de-f963-4f68-818b-ab0534be4c55] Start building block device mappings for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1324.284746] env[62627]: DEBUG nova.scheduler.client.report [None req-e0470d39-f560-44c4-82c4-ff37e75850a6 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1324.612041] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1324.612442] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fbc8a533-a314-4df7-8b30-0f90648f727a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.619260] env[62627]: DEBUG oslo_vmware.api [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1324.619260] env[62627]: value = "task-2195392" [ 1324.619260] env[62627]: _type = "Task" [ 1324.619260] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1324.627764] env[62627]: DEBUG oslo_vmware.api [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195392, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.114192] env[62627]: DEBUG nova.compute.manager [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 66f280de-f963-4f68-818b-ab0534be4c55] Start spawning the instance on the hypervisor. {{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1325.130026] env[62627]: DEBUG oslo_vmware.api [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195392, 'name': PowerOffVM_Task, 'duration_secs': 0.272862} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1325.130398] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1325.131238] env[62627]: DEBUG nova.virt.hardware [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1325.131529] env[62627]: DEBUG nova.virt.hardware [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1325.131776] env[62627]: DEBUG nova.virt.hardware [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1325.132063] env[62627]: DEBUG nova.virt.hardware [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1325.132327] env[62627]: DEBUG nova.virt.hardware [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1325.132505] env[62627]: DEBUG nova.virt.hardware [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1325.132777] env[62627]: DEBUG nova.virt.hardware [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1325.132962] env[62627]: DEBUG nova.virt.hardware [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 
tempest-ServerActionsTestOtherB-764568701-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1325.133152] env[62627]: DEBUG nova.virt.hardware [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1325.133321] env[62627]: DEBUG nova.virt.hardware [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1325.133494] env[62627]: DEBUG nova.virt.hardware [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1325.139367] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b21ae259-494e-4836-ae7c-0550f60b0d11 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.156665] env[62627]: DEBUG oslo_vmware.api [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1325.156665] env[62627]: value = "task-2195393" [ 1325.156665] env[62627]: _type = "Task" [ 1325.156665] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1325.158853] env[62627]: DEBUG nova.virt.hardware [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1325.159125] env[62627]: DEBUG nova.virt.hardware [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1325.159300] env[62627]: DEBUG nova.virt.hardware [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1325.159488] env[62627]: DEBUG nova.virt.hardware [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1325.159636] env[62627]: DEBUG nova.virt.hardware [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1325.159785] env[62627]: DEBUG nova.virt.hardware [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1325.159990] env[62627]: DEBUG nova.virt.hardware [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1325.160202] env[62627]: DEBUG nova.virt.hardware [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1325.160385] env[62627]: DEBUG nova.virt.hardware [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 
tempest-ServersTestJSON-494939627-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1325.160581] env[62627]: DEBUG nova.virt.hardware [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1325.160774] env[62627]: DEBUG nova.virt.hardware [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1325.161934] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92a71932-b39f-45e7-aa17-94aa49cd0fd3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.176450] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfbbddbf-10d3-48b5-9b66-e8c18bf0c3f2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.180532] env[62627]: DEBUG oslo_vmware.api [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195393, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.294308] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e0470d39-f560-44c4-82c4-ff37e75850a6 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.201s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1325.326827] env[62627]: DEBUG nova.compute.manager [req-5a7c6e62-1fd5-4006-a66c-2135f07c6b25 req-97a7da18-ec24-4667-ab5a-bbe82bd42c63 service nova] [instance: 66f280de-f963-4f68-818b-ab0534be4c55] Received event network-vif-plugged-0aca5f16-116e-4907-b0dc-1f1cd6bc60ef {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1325.327152] env[62627]: DEBUG oslo_concurrency.lockutils [req-5a7c6e62-1fd5-4006-a66c-2135f07c6b25 req-97a7da18-ec24-4667-ab5a-bbe82bd42c63 service nova] Acquiring lock "66f280de-f963-4f68-818b-ab0534be4c55-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1325.327360] env[62627]: DEBUG oslo_concurrency.lockutils [req-5a7c6e62-1fd5-4006-a66c-2135f07c6b25 req-97a7da18-ec24-4667-ab5a-bbe82bd42c63 service nova] Lock "66f280de-f963-4f68-818b-ab0534be4c55-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1325.327517] env[62627]: DEBUG oslo_concurrency.lockutils [req-5a7c6e62-1fd5-4006-a66c-2135f07c6b25 req-97a7da18-ec24-4667-ab5a-bbe82bd42c63 service nova] Lock 
"66f280de-f963-4f68-818b-ab0534be4c55-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1325.327694] env[62627]: DEBUG nova.compute.manager [req-5a7c6e62-1fd5-4006-a66c-2135f07c6b25 req-97a7da18-ec24-4667-ab5a-bbe82bd42c63 service nova] [instance: 66f280de-f963-4f68-818b-ab0534be4c55] No waiting events found dispatching network-vif-plugged-0aca5f16-116e-4907-b0dc-1f1cd6bc60ef {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1325.327880] env[62627]: WARNING nova.compute.manager [req-5a7c6e62-1fd5-4006-a66c-2135f07c6b25 req-97a7da18-ec24-4667-ab5a-bbe82bd42c63 service nova] [instance: 66f280de-f963-4f68-818b-ab0534be4c55] Received unexpected event network-vif-plugged-0aca5f16-116e-4907-b0dc-1f1cd6bc60ef for instance with vm_state building and task_state spawning. [ 1325.425925] env[62627]: DEBUG nova.network.neutron [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 66f280de-f963-4f68-818b-ab0534be4c55] Successfully updated port: 0aca5f16-116e-4907-b0dc-1f1cd6bc60ef {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1325.671271] env[62627]: DEBUG oslo_vmware.api [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195393, 'name': ReconfigVM_Task, 'duration_secs': 0.200363} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1325.671875] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3046d574-74c1-4ed2-9e4d-d776d96050c9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.693721] env[62627]: DEBUG nova.virt.hardware [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1325.693971] env[62627]: DEBUG nova.virt.hardware [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1325.694242] env[62627]: DEBUG nova.virt.hardware [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 
1325.694349] env[62627]: DEBUG nova.virt.hardware [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1325.694495] env[62627]: DEBUG nova.virt.hardware [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1325.694640] env[62627]: DEBUG nova.virt.hardware [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1325.694845] env[62627]: DEBUG nova.virt.hardware [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1325.695012] env[62627]: DEBUG nova.virt.hardware [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1325.695189] env[62627]: DEBUG nova.virt.hardware [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1325.695351] env[62627]: DEBUG nova.virt.hardware [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1325.695523] env[62627]: DEBUG nova.virt.hardware [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1325.696349] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-120cd793-ad17-4a5a-9c08-f89f6d97d907 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.701859] env[62627]: DEBUG oslo_vmware.api [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1325.701859] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5220a748-23ac-2df3-d0b3-7616d542970e" [ 1325.701859] env[62627]: _type = "Task" [ 1325.701859] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1325.709192] env[62627]: DEBUG oslo_vmware.api [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5220a748-23ac-2df3-d0b3-7616d542970e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.861273] env[62627]: INFO nova.scheduler.client.report [None req-e0470d39-f560-44c4-82c4-ff37e75850a6 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Deleted allocation for migration bd33cf80-bb50-408b-8395-b6c800449c22 [ 1325.930803] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquiring lock "refresh_cache-66f280de-f963-4f68-818b-ab0534be4c55" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1325.930803] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquired lock "refresh_cache-66f280de-f963-4f68-818b-ab0534be4c55" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1325.930803] env[62627]: DEBUG nova.network.neutron [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 66f280de-f963-4f68-818b-ab0534be4c55] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1326.156034] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7abd8502-e532-4e89-8e2d-a29e0b72788a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquiring lock "02b9bb2d-5752-4c09-9661-5966c2ac1bad" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1326.156034] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7abd8502-e532-4e89-8e2d-a29e0b72788a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "02b9bb2d-5752-4c09-9661-5966c2ac1bad" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1326.156405] env[62627]: DEBUG nova.compute.manager [None req-7abd8502-e532-4e89-8e2d-a29e0b72788a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Going to confirm migration 9 {{(pid=62627) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1326.212879] env[62627]: DEBUG oslo_vmware.api [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5220a748-23ac-2df3-d0b3-7616d542970e, 'name': SearchDatastore_Task, 'duration_secs': 0.017829} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1326.218072] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Reconfiguring VM instance instance-00000067 to detach disk 2000 {{(pid=62627) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1326.218346] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4ec1619f-3a55-45ec-93c8-2cfa5ecd1d79 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.236296] env[62627]: DEBUG oslo_vmware.api [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1326.236296] env[62627]: value = "task-2195394" [ 1326.236296] env[62627]: _type = "Task" [ 1326.236296] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1326.243891] env[62627]: DEBUG oslo_vmware.api [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195394, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1326.367642] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e0470d39-f560-44c4-82c4-ff37e75850a6 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lock "e9d33611-e373-4303-937d-b43d893e51f5" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 7.267s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1326.465015] env[62627]: DEBUG nova.network.neutron [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 66f280de-f963-4f68-818b-ab0534be4c55] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1326.532423] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-f6ca66e6-8a33-4397-a13e-d47cfe8c05a3 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Volume attach. 
Driver type: vmdk {{(pid=62627) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1326.532724] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-f6ca66e6-8a33-4397-a13e-d47cfe8c05a3 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447838', 'volume_id': 'b7a99f16-9526-46b7-ac02-baad9561109d', 'name': 'volume-b7a99f16-9526-46b7-ac02-baad9561109d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e', 'attached_at': '', 'detached_at': '', 'volume_id': 'b7a99f16-9526-46b7-ac02-baad9561109d', 'serial': 'b7a99f16-9526-46b7-ac02-baad9561109d'} {{(pid=62627) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1326.534676] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bda33177-33ae-42fd-b750-47cad961ed9b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.560160] env[62627]: DEBUG oslo_concurrency.lockutils [None req-665f932b-615f-4fd2-a7ae-6704507294f2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquiring lock "e9d33611-e373-4303-937d-b43d893e51f5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1326.560502] env[62627]: DEBUG oslo_concurrency.lockutils [None req-665f932b-615f-4fd2-a7ae-6704507294f2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lock "e9d33611-e373-4303-937d-b43d893e51f5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1326.560810] env[62627]: DEBUG oslo_concurrency.lockutils [None req-665f932b-615f-4fd2-a7ae-6704507294f2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquiring lock "e9d33611-e373-4303-937d-b43d893e51f5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1326.561099] env[62627]: DEBUG oslo_concurrency.lockutils [None req-665f932b-615f-4fd2-a7ae-6704507294f2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lock "e9d33611-e373-4303-937d-b43d893e51f5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1326.561369] env[62627]: DEBUG oslo_concurrency.lockutils [None req-665f932b-615f-4fd2-a7ae-6704507294f2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lock "e9d33611-e373-4303-937d-b43d893e51f5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1326.564178] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ced09d27-529f-4e91-9dca-8f76d9c89346 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.568284] env[62627]: INFO nova.compute.manager [None req-665f932b-615f-4fd2-a7ae-6704507294f2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Terminating instance [ 1326.604859] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-f6ca66e6-8a33-4397-a13e-d47cfe8c05a3 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Reconfiguring VM instance instance-0000006b to attach disk [datastore2] volume-b7a99f16-9526-46b7-ac02-baad9561109d/volume-b7a99f16-9526-46b7-ac02-baad9561109d.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1326.608811] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-46bb4936-42ad-4dd3-9282-c8d27bad7a67 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.635963] env[62627]: DEBUG oslo_vmware.api [None req-f6ca66e6-8a33-4397-a13e-d47cfe8c05a3 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Waiting for the task: (returnval){ [ 1326.635963] env[62627]: value = "task-2195395" [ 1326.635963] env[62627]: _type = "Task" [ 1326.635963] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1326.646789] env[62627]: DEBUG oslo_vmware.api [None req-f6ca66e6-8a33-4397-a13e-d47cfe8c05a3 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195395, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1326.662744] env[62627]: DEBUG nova.network.neutron [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 66f280de-f963-4f68-818b-ab0534be4c55] Updating instance_info_cache with network_info: [{"id": "0aca5f16-116e-4907-b0dc-1f1cd6bc60ef", "address": "fa:16:3e:2a:5b:df", "network": {"id": "93a120a3-5be9-4822-960a-7dc2d246a740", "bridge": "br-int", "label": "tempest-ServersTestJSON-1051356861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "710e7463994b4ec08bcc6d479cd1138b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49a94c29-ddd5-4383-9219-1c2c3bb09cc5", "external-id": "nsx-vlan-transportzone-2", "segmentation_id": 2, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0aca5f16-11", "ovs_interfaceid": "0aca5f16-116e-4907-b0dc-1f1cd6bc60ef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1326.722921] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7abd8502-e532-4e89-8e2d-a29e0b72788a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquiring lock "refresh_cache-02b9bb2d-5752-4c09-9661-5966c2ac1bad" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1326.723129] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7abd8502-e532-4e89-8e2d-a29e0b72788a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquired lock "refresh_cache-02b9bb2d-5752-4c09-9661-5966c2ac1bad" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1326.723349] env[62627]: DEBUG nova.network.neutron [None req-7abd8502-e532-4e89-8e2d-a29e0b72788a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1326.723544] env[62627]: DEBUG nova.objects.instance [None req-7abd8502-e532-4e89-8e2d-a29e0b72788a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lazy-loading 'info_cache' on Instance uuid 02b9bb2d-5752-4c09-9661-5966c2ac1bad {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1326.745640] env[62627]: DEBUG oslo_vmware.api [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195394, 'name': ReconfigVM_Task, 'duration_secs': 0.282614} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1326.745895] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Reconfigured VM instance instance-00000067 to detach disk 2000 {{(pid=62627) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1326.746655] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64896943-7469-40c9-b42f-b1fa871b734f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.771156] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Reconfiguring VM instance instance-00000067 to attach disk [datastore2] 1593ca50-bc97-464a-86ca-17995db15758/1593ca50-bc97-464a-86ca-17995db15758.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1326.771410] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fbb63bf6-de7a-419d-8e84-cd19f4c40dd6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.789225] env[62627]: DEBUG oslo_vmware.api [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1326.789225] env[62627]: value = "task-2195396" [ 1326.789225] env[62627]: _type = "Task" [ 1326.789225] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1326.799158] env[62627]: DEBUG oslo_vmware.api [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195396, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.073552] env[62627]: DEBUG nova.compute.manager [None req-665f932b-615f-4fd2-a7ae-6704507294f2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Start destroying the instance on the hypervisor. 
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1327.073773] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-665f932b-615f-4fd2-a7ae-6704507294f2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1327.074727] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f9f8016-9173-4a31-9499-8038c1d17a0e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.081910] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-665f932b-615f-4fd2-a7ae-6704507294f2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1327.082167] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-407c6bb4-ef91-49f3-bc92-3381ff1a4b6d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.089101] env[62627]: DEBUG oslo_vmware.api [None req-665f932b-615f-4fd2-a7ae-6704507294f2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 1327.089101] env[62627]: value = "task-2195397" [ 1327.089101] env[62627]: _type = "Task" [ 1327.089101] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1327.097223] env[62627]: DEBUG oslo_vmware.api [None req-665f932b-615f-4fd2-a7ae-6704507294f2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195397, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.145853] env[62627]: DEBUG oslo_vmware.api [None req-f6ca66e6-8a33-4397-a13e-d47cfe8c05a3 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195395, 'name': ReconfigVM_Task, 'duration_secs': 0.427776} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1327.146204] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-f6ca66e6-8a33-4397-a13e-d47cfe8c05a3 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Reconfigured VM instance instance-0000006b to attach disk [datastore2] volume-b7a99f16-9526-46b7-ac02-baad9561109d/volume-b7a99f16-9526-46b7-ac02-baad9561109d.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1327.150986] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f198d5f1-1fa3-43d7-a155-58f924c004b0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.164954] env[62627]: DEBUG oslo_vmware.api [None req-f6ca66e6-8a33-4397-a13e-d47cfe8c05a3 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Waiting for the task: (returnval){ [ 1327.164954] env[62627]: value = "task-2195398" [ 1327.164954] env[62627]: _type = "Task" [ 1327.164954] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1327.168138] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Releasing lock "refresh_cache-66f280de-f963-4f68-818b-ab0534be4c55" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1327.168442] env[62627]: DEBUG nova.compute.manager [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 66f280de-f963-4f68-818b-ab0534be4c55] Instance network_info: |[{"id": "0aca5f16-116e-4907-b0dc-1f1cd6bc60ef", "address": "fa:16:3e:2a:5b:df", "network": {"id": "93a120a3-5be9-4822-960a-7dc2d246a740", "bridge": "br-int", "label": "tempest-ServersTestJSON-1051356861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "710e7463994b4ec08bcc6d479cd1138b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49a94c29-ddd5-4383-9219-1c2c3bb09cc5", "external-id": "nsx-vlan-transportzone-2", "segmentation_id": 2, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0aca5f16-11", "ovs_interfaceid": "0aca5f16-116e-4907-b0dc-1f1cd6bc60ef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1327.168812] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 66f280de-f963-4f68-818b-ab0534be4c55] Instance VIF info [{'network_name': 'br-int', 
'mac_address': 'fa:16:3e:2a:5b:df', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '49a94c29-ddd5-4383-9219-1c2c3bb09cc5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0aca5f16-116e-4907-b0dc-1f1cd6bc60ef', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1327.175803] env[62627]: DEBUG oslo.service.loopingcall [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1327.176368] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 66f280de-f963-4f68-818b-ab0534be4c55] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1327.176584] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e22047c6-8716-47db-8ba6-5a7edc3426d5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.196120] env[62627]: DEBUG oslo_vmware.api [None req-f6ca66e6-8a33-4397-a13e-d47cfe8c05a3 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195398, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.201582] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1327.201582] env[62627]: value = "task-2195399" [ 1327.201582] env[62627]: _type = "Task" [ 1327.201582] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1327.210559] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195399, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.302033] env[62627]: DEBUG oslo_vmware.api [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195396, 'name': ReconfigVM_Task, 'duration_secs': 0.307185} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1327.302405] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Reconfigured VM instance instance-00000067 to attach disk [datastore2] 1593ca50-bc97-464a-86ca-17995db15758/1593ca50-bc97-464a-86ca-17995db15758.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1327.303517] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b14b53d3-2a1f-4bc9-9825-7b2eb946ad2c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.333077] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebedaf06-ebc5-48d5-b09f-08595cc2b174 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.361692] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37ad10d0-978e-420d-9714-192e0c5c03d6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.365323] env[62627]: DEBUG nova.compute.manager [req-90e98566-e426-40c0-8785-a29532128d57 req-35e87374-8857-48ff-af86-27c7aac83fcb service nova] [instance: 66f280de-f963-4f68-818b-ab0534be4c55] Received event network-changed-0aca5f16-116e-4907-b0dc-1f1cd6bc60ef {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1327.365509] env[62627]: DEBUG nova.compute.manager [req-90e98566-e426-40c0-8785-a29532128d57 req-35e87374-8857-48ff-af86-27c7aac83fcb service nova] [instance: 66f280de-f963-4f68-818b-ab0534be4c55] Refreshing instance network info cache due to event network-changed-0aca5f16-116e-4907-b0dc-1f1cd6bc60ef. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1327.365715] env[62627]: DEBUG oslo_concurrency.lockutils [req-90e98566-e426-40c0-8785-a29532128d57 req-35e87374-8857-48ff-af86-27c7aac83fcb service nova] Acquiring lock "refresh_cache-66f280de-f963-4f68-818b-ab0534be4c55" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1327.365857] env[62627]: DEBUG oslo_concurrency.lockutils [req-90e98566-e426-40c0-8785-a29532128d57 req-35e87374-8857-48ff-af86-27c7aac83fcb service nova] Acquired lock "refresh_cache-66f280de-f963-4f68-818b-ab0534be4c55" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1327.366025] env[62627]: DEBUG nova.network.neutron [req-90e98566-e426-40c0-8785-a29532128d57 req-35e87374-8857-48ff-af86-27c7aac83fcb service nova] [instance: 66f280de-f963-4f68-818b-ab0534be4c55] Refreshing network info cache for port 0aca5f16-116e-4907-b0dc-1f1cd6bc60ef {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1327.388881] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-768a0ffd-2c8a-4c41-bae9-08ef8bee08dd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.396024] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1327.396468] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a660010a-52ad-4fb1-a946-8743b08792a9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.401968] env[62627]: DEBUG oslo_vmware.api [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1327.401968] env[62627]: value = "task-2195400" [ 1327.401968] env[62627]: _type = "Task" [ 1327.401968] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1327.411460] env[62627]: DEBUG oslo_vmware.api [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195400, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.598760] env[62627]: DEBUG oslo_vmware.api [None req-665f932b-615f-4fd2-a7ae-6704507294f2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195397, 'name': PowerOffVM_Task, 'duration_secs': 0.294822} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1327.599012] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-665f932b-615f-4fd2-a7ae-6704507294f2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1327.599225] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-665f932b-615f-4fd2-a7ae-6704507294f2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1327.599484] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-363ffd83-778a-42b8-a346-a8f1b628e1c6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.656345] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-665f932b-615f-4fd2-a7ae-6704507294f2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1327.656681] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-665f932b-615f-4fd2-a7ae-6704507294f2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Deleting contents of the VM from datastore datastore1 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1327.657009] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-665f932b-615f-4fd2-a7ae-6704507294f2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Deleting the datastore file [datastore1] e9d33611-e373-4303-937d-b43d893e51f5 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1327.657381] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b5454884-a70e-478c-9a00-93b270c10583 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.664276] env[62627]: DEBUG oslo_vmware.api [None req-665f932b-615f-4fd2-a7ae-6704507294f2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 1327.664276] env[62627]: value = "task-2195402" [ 1327.664276] env[62627]: _type = "Task" [ 1327.664276] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1327.681911] env[62627]: DEBUG oslo_vmware.api [None req-f6ca66e6-8a33-4397-a13e-d47cfe8c05a3 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195398, 'name': ReconfigVM_Task, 'duration_secs': 0.133261} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1327.682167] env[62627]: DEBUG oslo_vmware.api [None req-665f932b-615f-4fd2-a7ae-6704507294f2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195402, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.682487] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-f6ca66e6-8a33-4397-a13e-d47cfe8c05a3 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447838', 'volume_id': 'b7a99f16-9526-46b7-ac02-baad9561109d', 'name': 'volume-b7a99f16-9526-46b7-ac02-baad9561109d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e', 'attached_at': '', 'detached_at': '', 'volume_id': 'b7a99f16-9526-46b7-ac02-baad9561109d', 'serial': 'b7a99f16-9526-46b7-ac02-baad9561109d'} {{(pid=62627) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1327.711159] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195399, 'name': CreateVM_Task, 'duration_secs': 0.430773} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1327.711323] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 66f280de-f963-4f68-818b-ab0534be4c55] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1327.718461] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1327.718629] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1327.718986] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1327.719246] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-45c47763-1ce7-404a-a54e-458139185f21 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.724697] env[62627]: DEBUG oslo_vmware.api [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1327.724697] env[62627]: value = 
"session[520bf719-dc86-5bda-f3a2-48a743b520b7]52abeafb-1105-fefb-3eef-ec715119b357" [ 1327.724697] env[62627]: _type = "Task" [ 1327.724697] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1327.733412] env[62627]: DEBUG oslo_vmware.api [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52abeafb-1105-fefb-3eef-ec715119b357, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.910916] env[62627]: DEBUG oslo_vmware.api [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195400, 'name': PowerOnVM_Task, 'duration_secs': 0.367054} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1327.912205] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1327.994930] env[62627]: DEBUG nova.network.neutron [None req-7abd8502-e532-4e89-8e2d-a29e0b72788a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Updating instance_info_cache with network_info: [{"id": "213d4e79-8676-47f0-9e73-c3a36df750d1", "address": "fa:16:3e:38:51:83", "network": {"id": "1e37275e-31eb-4a79-8ff1-1b105d16fe3d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-59116622-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.147", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7a97f57bdf4a46db80460b1d5c9f9b9f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f499bc9-78da-46c1-9274-19edf26d31cb", "external-id": "nsx-vlan-transportzone-243", "segmentation_id": 243, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap213d4e79-86", "ovs_interfaceid": "213d4e79-8676-47f0-9e73-c3a36df750d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1328.112252] env[62627]: DEBUG nova.network.neutron [req-90e98566-e426-40c0-8785-a29532128d57 req-35e87374-8857-48ff-af86-27c7aac83fcb service nova] [instance: 66f280de-f963-4f68-818b-ab0534be4c55] Updated VIF entry in instance network info cache for port 0aca5f16-116e-4907-b0dc-1f1cd6bc60ef. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1328.112806] env[62627]: DEBUG nova.network.neutron [req-90e98566-e426-40c0-8785-a29532128d57 req-35e87374-8857-48ff-af86-27c7aac83fcb service nova] [instance: 66f280de-f963-4f68-818b-ab0534be4c55] Updating instance_info_cache with network_info: [{"id": "0aca5f16-116e-4907-b0dc-1f1cd6bc60ef", "address": "fa:16:3e:2a:5b:df", "network": {"id": "93a120a3-5be9-4822-960a-7dc2d246a740", "bridge": "br-int", "label": "tempest-ServersTestJSON-1051356861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "710e7463994b4ec08bcc6d479cd1138b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49a94c29-ddd5-4383-9219-1c2c3bb09cc5", "external-id": "nsx-vlan-transportzone-2", "segmentation_id": 2, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0aca5f16-11", "ovs_interfaceid": "0aca5f16-116e-4907-b0dc-1f1cd6bc60ef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1328.174298] env[62627]: DEBUG oslo_vmware.api [None req-665f932b-615f-4fd2-a7ae-6704507294f2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195402, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.235829] env[62627]: DEBUG oslo_vmware.api [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52abeafb-1105-fefb-3eef-ec715119b357, 'name': SearchDatastore_Task, 'duration_secs': 0.016004} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1328.236025] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1328.236261] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 66f280de-f963-4f68-818b-ab0534be4c55] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1328.236493] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1328.236639] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1328.236851] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1328.237096] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fd9d5039-5cf1-4585-b7ad-48ee3c0006d6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.245802] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1328.245976] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1328.246683] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-76e28efe-e38c-4722-ad7c-30f1473ed6b2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.252237] env[62627]: DEBUG oslo_vmware.api [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1328.252237] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52120a71-94c4-b99d-fc75-126f47bc5a87" [ 1328.252237] env[62627]: _type = "Task" [ 1328.252237] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1328.259637] env[62627]: DEBUG oslo_vmware.api [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52120a71-94c4-b99d-fc75-126f47bc5a87, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.496324] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7abd8502-e532-4e89-8e2d-a29e0b72788a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Releasing lock "refresh_cache-02b9bb2d-5752-4c09-9661-5966c2ac1bad" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1328.496587] env[62627]: DEBUG nova.objects.instance [None req-7abd8502-e532-4e89-8e2d-a29e0b72788a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lazy-loading 'migration_context' on Instance uuid 02b9bb2d-5752-4c09-9661-5966c2ac1bad {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1328.616231] env[62627]: DEBUG oslo_concurrency.lockutils [req-90e98566-e426-40c0-8785-a29532128d57 req-35e87374-8857-48ff-af86-27c7aac83fcb service nova] Releasing lock "refresh_cache-66f280de-f963-4f68-818b-ab0534be4c55" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1328.675166] env[62627]: DEBUG oslo_vmware.api [None req-665f932b-615f-4fd2-a7ae-6704507294f2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195402, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.665096} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1328.675435] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-665f932b-615f-4fd2-a7ae-6704507294f2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1328.675693] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-665f932b-615f-4fd2-a7ae-6704507294f2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Deleted contents of the VM from datastore datastore1 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1328.675886] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-665f932b-615f-4fd2-a7ae-6704507294f2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1328.676079] env[62627]: INFO nova.compute.manager [None req-665f932b-615f-4fd2-a7ae-6704507294f2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Took 1.60 seconds to destroy the instance on the hypervisor. [ 1328.676321] env[62627]: DEBUG oslo.service.loopingcall [None req-665f932b-615f-4fd2-a7ae-6704507294f2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1328.676510] env[62627]: DEBUG nova.compute.manager [-] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1328.676605] env[62627]: DEBUG nova.network.neutron [-] [instance: e9d33611-e373-4303-937d-b43d893e51f5] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1328.716863] env[62627]: DEBUG nova.objects.instance [None req-f6ca66e6-8a33-4397-a13e-d47cfe8c05a3 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lazy-loading 'flavor' on Instance uuid cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1328.762419] env[62627]: DEBUG oslo_vmware.api [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52120a71-94c4-b99d-fc75-126f47bc5a87, 'name': SearchDatastore_Task, 'duration_secs': 0.009999} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1328.763456] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-19625a29-e179-4477-b393-ba83f1b2a322 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.768478] env[62627]: DEBUG oslo_vmware.api [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1328.768478] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]527e6074-d3ea-242b-f065-8040e499ed2d" [ 1328.768478] env[62627]: _type = "Task" [ 1328.768478] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1328.775749] env[62627]: DEBUG oslo_vmware.api [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]527e6074-d3ea-242b-f065-8040e499ed2d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.953415] env[62627]: INFO nova.compute.manager [None req-7bc950f5-edec-4422-842b-2f0c8b2cd8c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Updating instance to original state: 'active' [ 1328.999039] env[62627]: DEBUG nova.objects.base [None req-7abd8502-e532-4e89-8e2d-a29e0b72788a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Object Instance<02b9bb2d-5752-4c09-9661-5966c2ac1bad> lazy-loaded attributes: info_cache,migration_context {{(pid=62627) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1328.999903] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46bd2672-6169-41a3-80f4-590b8cef2aad {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.020046] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-32301c1f-12e4-418e-94d5-05ef94953ca2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.025112] env[62627]: DEBUG oslo_vmware.api [None req-7abd8502-e532-4e89-8e2d-a29e0b72788a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1329.025112] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52f91473-5290-b345-4f7f-d3d86f39cb5b" [ 1329.025112] env[62627]: _type = "Task" [ 1329.025112] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1329.032894] env[62627]: DEBUG oslo_vmware.api [None req-7abd8502-e532-4e89-8e2d-a29e0b72788a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52f91473-5290-b345-4f7f-d3d86f39cb5b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.224420] env[62627]: DEBUG nova.compute.manager [req-eb010a8d-fd7b-4902-b2b9-90c68ced9fc9 req-74b2b538-9d8c-43b1-b9c8-7dfe85aa5ae7 service nova] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Received event network-vif-deleted-5825a537-f659-4e7e-907e-e22208e06c0c {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1329.224676] env[62627]: INFO nova.compute.manager [req-eb010a8d-fd7b-4902-b2b9-90c68ced9fc9 req-74b2b538-9d8c-43b1-b9c8-7dfe85aa5ae7 service nova] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Neutron deleted interface 5825a537-f659-4e7e-907e-e22208e06c0c; detaching it from the instance and deleting it from the info cache [ 1329.224784] env[62627]: DEBUG nova.network.neutron [req-eb010a8d-fd7b-4902-b2b9-90c68ced9fc9 req-74b2b538-9d8c-43b1-b9c8-7dfe85aa5ae7 service nova] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1329.225878] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f6ca66e6-8a33-4397-a13e-d47cfe8c05a3 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lock "cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.300s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1329.284050] env[62627]: DEBUG oslo_vmware.api [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]527e6074-d3ea-242b-f065-8040e499ed2d, 'name': SearchDatastore_Task, 'duration_secs': 0.043836} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1329.284715] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1329.284715] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 66f280de-f963-4f68-818b-ab0534be4c55/66f280de-f963-4f68-818b-ab0534be4c55.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1329.284955] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c3f03b57-d25e-4e0d-93c6-0f50048ed67a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.292301] env[62627]: DEBUG oslo_vmware.api [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1329.292301] env[62627]: value = "task-2195403" [ 1329.292301] env[62627]: _type = "Task" [ 1329.292301] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1329.300288] env[62627]: DEBUG oslo_vmware.api [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195403, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.404369] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b0586b60-ad91-4576-9b82-57052da73c8c tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Acquiring lock "cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1329.404691] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b0586b60-ad91-4576-9b82-57052da73c8c tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lock "cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1329.536877] env[62627]: DEBUG oslo_vmware.api [None req-7abd8502-e532-4e89-8e2d-a29e0b72788a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52f91473-5290-b345-4f7f-d3d86f39cb5b, 'name': SearchDatastore_Task, 'duration_secs': 0.006841} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1329.537334] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7abd8502-e532-4e89-8e2d-a29e0b72788a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1329.537620] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7abd8502-e532-4e89-8e2d-a29e0b72788a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1329.620018] env[62627]: DEBUG nova.network.neutron [-] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1329.728203] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7c48117b-ad90-4fbd-af1b-76c7b55aa284 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.739660] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-480359b4-6ec2-4a3f-afc1-0850192023c8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.769994] env[62627]: DEBUG nova.compute.manager [req-eb010a8d-fd7b-4902-b2b9-90c68ced9fc9 req-74b2b538-9d8c-43b1-b9c8-7dfe85aa5ae7 service nova] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Detach interface failed, port_id=5825a537-f659-4e7e-907e-e22208e06c0c, reason: Instance e9d33611-e373-4303-937d-b43d893e51f5 could not be found. {{(pid=62627) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1329.802059] env[62627]: DEBUG oslo_vmware.api [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195403, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.504391} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1329.802059] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 66f280de-f963-4f68-818b-ab0534be4c55/66f280de-f963-4f68-818b-ab0534be4c55.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1329.802330] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 66f280de-f963-4f68-818b-ab0534be4c55] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1329.802484] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-67463e1a-0ed0-4c69-9810-a3a4a9f632a5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.808247] env[62627]: DEBUG oslo_vmware.api [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1329.808247] env[62627]: value = "task-2195404" [ 1329.808247] env[62627]: _type = "Task" [ 1329.808247] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1329.816642] env[62627]: DEBUG oslo_vmware.api [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195404, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.908423] env[62627]: INFO nova.compute.manager [None req-b0586b60-ad91-4576-9b82-57052da73c8c tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Detaching volume b7a99f16-9526-46b7-ac02-baad9561109d [ 1329.946484] env[62627]: INFO nova.virt.block_device [None req-b0586b60-ad91-4576-9b82-57052da73c8c tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Attempting to driver detach volume b7a99f16-9526-46b7-ac02-baad9561109d from mountpoint /dev/sdb [ 1329.946572] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-b0586b60-ad91-4576-9b82-57052da73c8c tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Volume detach. 
Driver type: vmdk {{(pid=62627) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1329.946799] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-b0586b60-ad91-4576-9b82-57052da73c8c tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447838', 'volume_id': 'b7a99f16-9526-46b7-ac02-baad9561109d', 'name': 'volume-b7a99f16-9526-46b7-ac02-baad9561109d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e', 'attached_at': '', 'detached_at': '', 'volume_id': 'b7a99f16-9526-46b7-ac02-baad9561109d', 'serial': 'b7a99f16-9526-46b7-ac02-baad9561109d'} {{(pid=62627) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1329.947829] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a37b93e-37b1-49d6-ac1d-62a20ffdee94 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.973636] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a85b1ab0-0fb6-4581-8208-c67aec209726 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.981196] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28a16165-ee20-443e-9187-d1c303e8723e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.002554] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2fd33279-741e-4328-b83d-b3ca250d0765 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquiring lock "1593ca50-bc97-464a-86ca-17995db15758" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1330.002795] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2fd33279-741e-4328-b83d-b3ca250d0765 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lock "1593ca50-bc97-464a-86ca-17995db15758" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1330.003015] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2fd33279-741e-4328-b83d-b3ca250d0765 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquiring lock "1593ca50-bc97-464a-86ca-17995db15758-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1330.003221] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2fd33279-741e-4328-b83d-b3ca250d0765 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lock "1593ca50-bc97-464a-86ca-17995db15758-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1330.003434] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2fd33279-741e-4328-b83d-b3ca250d0765 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lock "1593ca50-bc97-464a-86ca-17995db15758-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1330.005567] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00f1e7fd-595f-4401-93b5-682626081580 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.008775] env[62627]: INFO nova.compute.manager [None req-2fd33279-741e-4328-b83d-b3ca250d0765 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Terminating instance [ 1330.024095] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-b0586b60-ad91-4576-9b82-57052da73c8c tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] The volume has not been displaced from its original location: [datastore2] volume-b7a99f16-9526-46b7-ac02-baad9561109d/volume-b7a99f16-9526-46b7-ac02-baad9561109d.vmdk. No consolidation needed. {{(pid=62627) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1330.029296] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-b0586b60-ad91-4576-9b82-57052da73c8c tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Reconfiguring VM instance instance-0000006b to detach disk 2001 {{(pid=62627) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1330.029824] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e2132431-a1db-4fa3-a983-ef00ecd5ff79 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.049676] env[62627]: DEBUG oslo_vmware.api [None req-b0586b60-ad91-4576-9b82-57052da73c8c tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Waiting for the task: (returnval){ [ 1330.049676] env[62627]: value = "task-2195405" [ 1330.049676] env[62627]: _type = "Task" [ 1330.049676] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.058151] env[62627]: DEBUG oslo_vmware.api [None req-b0586b60-ad91-4576-9b82-57052da73c8c tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195405, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.123108] env[62627]: INFO nova.compute.manager [-] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Took 1.45 seconds to deallocate network for instance. 
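The PowerOnVM_Task / wait_for_task sequences and the Acquiring/Acquired/Releasing lock entries above reflect two library patterns that recur throughout this log: oslo.vmware's invoke-a-task-then-poll handling and oslo.concurrency's named locks. Below is a minimal sketch of both, assuming placeholder vCenter connection details and a hypothetical managed-object reference value; none of the names or values are taken from this deployment.

# Sketch of the invoke-then-wait and named-lock patterns visible in the log.
# The host, credentials, lock name, and the 'vm-12345' moref are assumptions.
from oslo_concurrency import lockutils
from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

session = vmware_api.VMwareAPISession(
    'vcenter.example.org', 'user', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

# Build a managed-object reference for an existing VM (value is hypothetical).
vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

# Serialize work under a named lock, which produces the kind of
# Acquiring/Acquired/Releasing lines emitted by oslo_concurrency above.
with lockutils.lock('refresh_cache-vm-12345'):
    # Invoke the vSphere task method, then block until vCenter reports
    # completion; wait_for_task() polls the task (the _poll_task lines)
    # and raises if the task finishes in an error state.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)

In the nova vmwareapi driver these calls go through its own session wrapper rather than being issued inline like this; the sketch only illustrates the library-level pattern that the surrounding log lines record.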
[ 1330.181186] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24663009-9a9d-4a9f-925d-e63b5c25ee7f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.190589] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61c292b6-e56b-423f-bfae-c628373021f0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.224861] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d41721d-3cf6-4461-a4c1-118e3ef200bc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.233313] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b2998b9-a785-4934-8610-8b9fc52188d7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.248092] env[62627]: DEBUG nova.compute.provider_tree [None req-7abd8502-e532-4e89-8e2d-a29e0b72788a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1330.317570] env[62627]: DEBUG oslo_vmware.api [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195404, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.054316} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1330.317857] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 66f280de-f963-4f68-818b-ab0534be4c55] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1330.318758] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f4bb30b-7c24-4a20-96b7-9e21b6ace478 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.340908] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 66f280de-f963-4f68-818b-ab0534be4c55] Reconfiguring VM instance instance-0000006f to attach disk [datastore2] 66f280de-f963-4f68-818b-ab0534be4c55/66f280de-f963-4f68-818b-ab0534be4c55.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1330.341206] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8c9ad892-aad2-41aa-adf1-08c35705a4ee {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.360835] env[62627]: DEBUG oslo_vmware.api [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1330.360835] env[62627]: value = "task-2195406" [ 1330.360835] 
env[62627]: _type = "Task" [ 1330.360835] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.368668] env[62627]: DEBUG oslo_vmware.api [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195406, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.513406] env[62627]: DEBUG nova.compute.manager [None req-2fd33279-741e-4328-b83d-b3ca250d0765 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Start destroying the instance on the hypervisor. {{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1330.514067] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fd33279-741e-4328-b83d-b3ca250d0765 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1330.514067] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a598f71b-aa6f-4126-9ef3-1b2dc91c69ac {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.521044] env[62627]: DEBUG oslo_vmware.api [None req-2fd33279-741e-4328-b83d-b3ca250d0765 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1330.521044] env[62627]: value = "task-2195407" [ 1330.521044] env[62627]: _type = "Task" [ 1330.521044] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.529421] env[62627]: DEBUG oslo_vmware.api [None req-2fd33279-741e-4328-b83d-b3ca250d0765 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195407, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.559251] env[62627]: DEBUG oslo_vmware.api [None req-b0586b60-ad91-4576-9b82-57052da73c8c tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195405, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.630556] env[62627]: DEBUG oslo_concurrency.lockutils [None req-665f932b-615f-4fd2-a7ae-6704507294f2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1330.751971] env[62627]: DEBUG nova.scheduler.client.report [None req-7abd8502-e532-4e89-8e2d-a29e0b72788a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1330.870835] env[62627]: DEBUG oslo_vmware.api [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195406, 'name': ReconfigVM_Task, 'duration_secs': 0.294798} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1330.871170] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 66f280de-f963-4f68-818b-ab0534be4c55] Reconfigured VM instance instance-0000006f to attach disk [datastore2] 66f280de-f963-4f68-818b-ab0534be4c55/66f280de-f963-4f68-818b-ab0534be4c55.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1330.871815] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-03885839-06de-4239-a24a-5a2fae346863 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.878755] env[62627]: DEBUG oslo_vmware.api [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1330.878755] env[62627]: value = "task-2195408" [ 1330.878755] env[62627]: _type = "Task" [ 1330.878755] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.885901] env[62627]: DEBUG oslo_vmware.api [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195408, 'name': Rename_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.031382] env[62627]: DEBUG oslo_vmware.api [None req-2fd33279-741e-4328-b83d-b3ca250d0765 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195407, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.062247] env[62627]: DEBUG oslo_vmware.api [None req-b0586b60-ad91-4576-9b82-57052da73c8c tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195405, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.391148] env[62627]: DEBUG oslo_vmware.api [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195408, 'name': Rename_Task, 'duration_secs': 0.169152} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.391148] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 66f280de-f963-4f68-818b-ab0534be4c55] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1331.391148] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1e46fb4e-8c68-4e1e-a6e2-004c9814d165 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.396803] env[62627]: DEBUG oslo_vmware.api [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1331.396803] env[62627]: value = "task-2195409" [ 1331.396803] env[62627]: _type = "Task" [ 1331.396803] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.404216] env[62627]: DEBUG oslo_vmware.api [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195409, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.531700] env[62627]: DEBUG oslo_vmware.api [None req-2fd33279-741e-4328-b83d-b3ca250d0765 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195407, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.561019] env[62627]: DEBUG oslo_vmware.api [None req-b0586b60-ad91-4576-9b82-57052da73c8c tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195405, 'name': ReconfigVM_Task, 'duration_secs': 1.354043} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.561278] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-b0586b60-ad91-4576-9b82-57052da73c8c tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Reconfigured VM instance instance-0000006b to detach disk 2001 {{(pid=62627) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1331.566200] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-43cc0272-43d3-4e2c-87df-d6fc20cba4b2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.580121] env[62627]: DEBUG oslo_vmware.api [None req-b0586b60-ad91-4576-9b82-57052da73c8c tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Waiting for the task: (returnval){ [ 1331.580121] env[62627]: value = "task-2195410" [ 1331.580121] env[62627]: _type = "Task" [ 1331.580121] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.587857] env[62627]: DEBUG oslo_vmware.api [None req-b0586b60-ad91-4576-9b82-57052da73c8c tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195410, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.762592] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7abd8502-e532-4e89-8e2d-a29e0b72788a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.225s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1331.765428] env[62627]: DEBUG oslo_concurrency.lockutils [None req-665f932b-615f-4fd2-a7ae-6704507294f2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.135s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1331.765632] env[62627]: DEBUG oslo_concurrency.lockutils [None req-665f932b-615f-4fd2-a7ae-6704507294f2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1331.783410] env[62627]: INFO nova.scheduler.client.report [None req-665f932b-615f-4fd2-a7ae-6704507294f2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Deleted allocations for instance e9d33611-e373-4303-937d-b43d893e51f5 [ 1331.906573] env[62627]: DEBUG oslo_vmware.api [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195409, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.031786] env[62627]: DEBUG oslo_vmware.api [None req-2fd33279-741e-4328-b83d-b3ca250d0765 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195407, 'name': PowerOffVM_Task, 'duration_secs': 1.053322} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1332.032176] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fd33279-741e-4328-b83d-b3ca250d0765 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1332.032301] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-2fd33279-741e-4328-b83d-b3ca250d0765 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Volume detach. Driver type: vmdk {{(pid=62627) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1332.032480] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-2fd33279-741e-4328-b83d-b3ca250d0765 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447832', 'volume_id': 'ade44e27-be53-4d9a-b346-ccf17aadfded', 'name': 'volume-ade44e27-be53-4d9a-b346-ccf17aadfded', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '1593ca50-bc97-464a-86ca-17995db15758', 'attached_at': '2025-01-30T20:40:53.000000', 'detached_at': '', 'volume_id': 'ade44e27-be53-4d9a-b346-ccf17aadfded', 'serial': 'ade44e27-be53-4d9a-b346-ccf17aadfded'} {{(pid=62627) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1332.033537] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ca2bd45-1681-45ef-b683-00ad404dec82 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.054208] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e091c2c3-673c-4fed-a813-a847391ccb91 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.061393] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64c17700-584b-4764-8807-87ec6e35342f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.085353] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-481089c3-117d-4ad6-bfa5-7992aaf5fe7d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.092305] env[62627]: DEBUG oslo_vmware.api [None req-b0586b60-ad91-4576-9b82-57052da73c8c tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195410, 'name': ReconfigVM_Task, 'duration_secs': 0.133571} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1332.102308] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-b0586b60-ad91-4576-9b82-57052da73c8c tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447838', 'volume_id': 'b7a99f16-9526-46b7-ac02-baad9561109d', 'name': 'volume-b7a99f16-9526-46b7-ac02-baad9561109d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e', 'attached_at': '', 'detached_at': '', 'volume_id': 'b7a99f16-9526-46b7-ac02-baad9561109d', 'serial': 'b7a99f16-9526-46b7-ac02-baad9561109d'} {{(pid=62627) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1332.104368] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-2fd33279-741e-4328-b83d-b3ca250d0765 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] The volume has not been displaced from its original location: [datastore2] volume-ade44e27-be53-4d9a-b346-ccf17aadfded/volume-ade44e27-be53-4d9a-b346-ccf17aadfded.vmdk. No consolidation needed. {{(pid=62627) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1332.109375] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-2fd33279-741e-4328-b83d-b3ca250d0765 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Reconfiguring VM instance instance-00000067 to detach disk 2001 {{(pid=62627) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1332.109846] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-40bf10c2-969d-4f61-96cb-73c8214c4abb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.131059] env[62627]: DEBUG oslo_vmware.api [None req-2fd33279-741e-4328-b83d-b3ca250d0765 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1332.131059] env[62627]: value = "task-2195411" [ 1332.131059] env[62627]: _type = "Task" [ 1332.131059] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1332.138588] env[62627]: DEBUG oslo_vmware.api [None req-2fd33279-741e-4328-b83d-b3ca250d0765 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195411, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.291038] env[62627]: DEBUG oslo_concurrency.lockutils [None req-665f932b-615f-4fd2-a7ae-6704507294f2 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lock "e9d33611-e373-4303-937d-b43d893e51f5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.730s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1332.317308] env[62627]: INFO nova.scheduler.client.report [None req-7abd8502-e532-4e89-8e2d-a29e0b72788a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Deleted allocation for migration 98c06cf0-c052-4b55-bb5e-c646ddabcc85 [ 1332.406185] env[62627]: DEBUG oslo_vmware.api [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195409, 'name': PowerOnVM_Task, 'duration_secs': 0.619918} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1332.406564] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 66f280de-f963-4f68-818b-ab0534be4c55] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1332.406688] env[62627]: INFO nova.compute.manager [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 66f280de-f963-4f68-818b-ab0534be4c55] Took 7.29 seconds to spawn the instance on the hypervisor. [ 1332.406866] env[62627]: DEBUG nova.compute.manager [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 66f280de-f963-4f68-818b-ab0534be4c55] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1332.407664] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79a9742d-765c-47d0-aa8c-9545b18ec43d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.640553] env[62627]: DEBUG oslo_vmware.api [None req-2fd33279-741e-4328-b83d-b3ca250d0765 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195411, 'name': ReconfigVM_Task, 'duration_secs': 0.18983} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1332.640808] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-2fd33279-741e-4328-b83d-b3ca250d0765 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Reconfigured VM instance instance-00000067 to detach disk 2001 {{(pid=62627) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1332.645586] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9da40620-3ace-4227-be0c-9cbab4a2e856 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.656458] env[62627]: DEBUG nova.objects.instance [None req-b0586b60-ad91-4576-9b82-57052da73c8c tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lazy-loading 'flavor' on Instance uuid cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1332.666860] env[62627]: DEBUG oslo_vmware.api [None req-2fd33279-741e-4328-b83d-b3ca250d0765 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1332.666860] env[62627]: value = "task-2195412" [ 1332.666860] env[62627]: _type = "Task" [ 1332.666860] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1332.676907] env[62627]: DEBUG oslo_vmware.api [None req-2fd33279-741e-4328-b83d-b3ca250d0765 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195412, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.701826] env[62627]: INFO nova.compute.manager [None req-cc0caf3f-a26a-4953-8efd-fecdf0ffbad1 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Get console output [ 1332.702196] env[62627]: WARNING nova.virt.vmwareapi.driver [None req-cc0caf3f-a26a-4953-8efd-fecdf0ffbad1 tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] The console log is missing. Check your VSPC configuration [ 1332.822749] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7abd8502-e532-4e89-8e2d-a29e0b72788a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "02b9bb2d-5752-4c09-9661-5966c2ac1bad" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.667s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1332.926032] env[62627]: INFO nova.compute.manager [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 66f280de-f963-4f68-818b-ab0534be4c55] Took 12.10 seconds to build instance. 
[ 1333.183128] env[62627]: DEBUG oslo_vmware.api [None req-2fd33279-741e-4328-b83d-b3ca250d0765 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195412, 'name': ReconfigVM_Task, 'duration_secs': 0.137486} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1333.183465] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-2fd33279-741e-4328-b83d-b3ca250d0765 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447832', 'volume_id': 'ade44e27-be53-4d9a-b346-ccf17aadfded', 'name': 'volume-ade44e27-be53-4d9a-b346-ccf17aadfded', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '1593ca50-bc97-464a-86ca-17995db15758', 'attached_at': '2025-01-30T20:40:53.000000', 'detached_at': '', 'volume_id': 'ade44e27-be53-4d9a-b346-ccf17aadfded', 'serial': 'ade44e27-be53-4d9a-b346-ccf17aadfded'} {{(pid=62627) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1333.183765] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-2fd33279-741e-4328-b83d-b3ca250d0765 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1333.184601] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5daa627-6762-406a-a134-1442a6ca0fe2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.192613] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-2fd33279-741e-4328-b83d-b3ca250d0765 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1333.192865] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8eea8598-9e9a-4344-9dcc-11a410b1e613 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.271347] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-2fd33279-741e-4328-b83d-b3ca250d0765 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1333.271624] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-2fd33279-741e-4328-b83d-b3ca250d0765 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1333.271809] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-2fd33279-741e-4328-b83d-b3ca250d0765 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Deleting the datastore file [datastore2] 
1593ca50-bc97-464a-86ca-17995db15758 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1333.272116] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-811e99b3-0eeb-43f3-9831-0a2117a5babe {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.277953] env[62627]: DEBUG oslo_vmware.api [None req-2fd33279-741e-4328-b83d-b3ca250d0765 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1333.277953] env[62627]: value = "task-2195414" [ 1333.277953] env[62627]: _type = "Task" [ 1333.277953] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1333.286747] env[62627]: DEBUG oslo_vmware.api [None req-2fd33279-741e-4328-b83d-b3ca250d0765 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195414, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1333.430895] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5bb73d86-b4d7-43f0-89da-8ca805ba633e tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "66f280de-f963-4f68-818b-ab0534be4c55" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.609s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1333.678624] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b0586b60-ad91-4576-9b82-57052da73c8c tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lock "cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.274s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1333.787275] env[62627]: DEBUG oslo_vmware.api [None req-2fd33279-741e-4328-b83d-b3ca250d0765 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195414, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.160561} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1333.787537] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-2fd33279-741e-4328-b83d-b3ca250d0765 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1333.787721] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-2fd33279-741e-4328-b83d-b3ca250d0765 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1333.787897] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-2fd33279-741e-4328-b83d-b3ca250d0765 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1333.788082] env[62627]: INFO nova.compute.manager [None req-2fd33279-741e-4328-b83d-b3ca250d0765 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Took 3.27 seconds to destroy the instance on the hypervisor. [ 1333.788324] env[62627]: DEBUG oslo.service.loopingcall [None req-2fd33279-741e-4328-b83d-b3ca250d0765 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1333.788507] env[62627]: DEBUG nova.compute.manager [-] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1333.788604] env[62627]: DEBUG nova.network.neutron [-] [instance: 1593ca50-bc97-464a-86ca-17995db15758] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1333.791730] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c7d52801-5d4e-415b-9d6e-81293abccabb tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquiring lock "66f280de-f963-4f68-818b-ab0534be4c55" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1333.791946] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c7d52801-5d4e-415b-9d6e-81293abccabb tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "66f280de-f963-4f68-818b-ab0534be4c55" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1333.792172] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c7d52801-5d4e-415b-9d6e-81293abccabb tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquiring lock "66f280de-f963-4f68-818b-ab0534be4c55-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1333.792375] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c7d52801-5d4e-415b-9d6e-81293abccabb tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "66f280de-f963-4f68-818b-ab0534be4c55-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1333.792542] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c7d52801-5d4e-415b-9d6e-81293abccabb tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "66f280de-f963-4f68-818b-ab0534be4c55-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1333.794351] env[62627]: INFO nova.compute.manager [None req-c7d52801-5d4e-415b-9d6e-81293abccabb tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 66f280de-f963-4f68-818b-ab0534be4c55] Terminating instance [ 1333.919955] env[62627]: DEBUG oslo_concurrency.lockutils [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquiring lock "16ed0637-d24d-488d-b9ba-dd5a49a7b2a2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1333.920211] env[62627]: DEBUG 
oslo_concurrency.lockutils [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lock "16ed0637-d24d-488d-b9ba-dd5a49a7b2a2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1333.938581] env[62627]: DEBUG oslo_concurrency.lockutils [None req-18f7efbf-9cf2-4974-b494-39571afdde48 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Acquiring lock "cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1333.938761] env[62627]: DEBUG oslo_concurrency.lockutils [None req-18f7efbf-9cf2-4974-b494-39571afdde48 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lock "cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1333.938968] env[62627]: DEBUG oslo_concurrency.lockutils [None req-18f7efbf-9cf2-4974-b494-39571afdde48 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Acquiring lock "cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1333.939196] env[62627]: DEBUG oslo_concurrency.lockutils [None req-18f7efbf-9cf2-4974-b494-39571afdde48 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lock "cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1333.939373] env[62627]: DEBUG oslo_concurrency.lockutils [None req-18f7efbf-9cf2-4974-b494-39571afdde48 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lock "cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1333.941316] env[62627]: INFO nova.compute.manager [None req-18f7efbf-9cf2-4974-b494-39571afdde48 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Terminating instance [ 1334.215516] env[62627]: DEBUG nova.compute.manager [req-972952b6-7ab7-4ec8-9340-3c1ff0c4e109 req-558b3291-66d2-44e6-95d9-ced1388c6a2b service nova] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Received event network-vif-deleted-57af03b0-8d4d-4f61-a384-800617af9be4 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1334.215827] env[62627]: INFO nova.compute.manager [req-972952b6-7ab7-4ec8-9340-3c1ff0c4e109 req-558b3291-66d2-44e6-95d9-ced1388c6a2b 
service nova] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Neutron deleted interface 57af03b0-8d4d-4f61-a384-800617af9be4; detaching it from the instance and deleting it from the info cache [ 1334.215926] env[62627]: DEBUG nova.network.neutron [req-972952b6-7ab7-4ec8-9340-3c1ff0c4e109 req-558b3291-66d2-44e6-95d9-ced1388c6a2b service nova] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1334.298048] env[62627]: DEBUG nova.compute.manager [None req-c7d52801-5d4e-415b-9d6e-81293abccabb tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 66f280de-f963-4f68-818b-ab0534be4c55] Start destroying the instance on the hypervisor. {{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1334.298354] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c7d52801-5d4e-415b-9d6e-81293abccabb tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 66f280de-f963-4f68-818b-ab0534be4c55] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1334.299685] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea43ca41-b6c0-456b-b5ab-486d4822a960 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.307922] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7d52801-5d4e-415b-9d6e-81293abccabb tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 66f280de-f963-4f68-818b-ab0534be4c55] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1334.308180] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-17242f76-b06a-4dd0-be52-72ef46fda948 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.314483] env[62627]: DEBUG oslo_vmware.api [None req-c7d52801-5d4e-415b-9d6e-81293abccabb tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1334.314483] env[62627]: value = "task-2195415" [ 1334.314483] env[62627]: _type = "Task" [ 1334.314483] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1334.323711] env[62627]: DEBUG oslo_vmware.api [None req-c7d52801-5d4e-415b-9d6e-81293abccabb tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195415, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1334.422720] env[62627]: DEBUG nova.compute.manager [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Starting instance... 
{{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1334.445685] env[62627]: DEBUG nova.compute.manager [None req-18f7efbf-9cf2-4974-b494-39571afdde48 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Start destroying the instance on the hypervisor. {{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1334.445953] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-18f7efbf-9cf2-4974-b494-39571afdde48 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1334.446812] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0171fe1c-9e18-4aae-a422-cd60efad475e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.454551] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-18f7efbf-9cf2-4974-b494-39571afdde48 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1334.454823] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0ed17917-0c74-4b70-8e71-7085a7c487e2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.462405] env[62627]: DEBUG oslo_vmware.api [None req-18f7efbf-9cf2-4974-b494-39571afdde48 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Waiting for the task: (returnval){ [ 1334.462405] env[62627]: value = "task-2195416" [ 1334.462405] env[62627]: _type = "Task" [ 1334.462405] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1334.470817] env[62627]: DEBUG oslo_vmware.api [None req-18f7efbf-9cf2-4974-b494-39571afdde48 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195416, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1334.697213] env[62627]: DEBUG nova.network.neutron [-] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1334.718614] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b43ea213-a356-4f27-adff-ed12f9b68ad2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.728371] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e048fdcf-7e30-4525-811e-9c5231680c2a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.755437] env[62627]: DEBUG nova.compute.manager [req-972952b6-7ab7-4ec8-9340-3c1ff0c4e109 req-558b3291-66d2-44e6-95d9-ced1388c6a2b service nova] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Detach interface failed, port_id=57af03b0-8d4d-4f61-a384-800617af9be4, reason: Instance 1593ca50-bc97-464a-86ca-17995db15758 could not be found. {{(pid=62627) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1334.824226] env[62627]: DEBUG oslo_vmware.api [None req-c7d52801-5d4e-415b-9d6e-81293abccabb tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195415, 'name': PowerOffVM_Task, 'duration_secs': 0.190747} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1334.824501] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7d52801-5d4e-415b-9d6e-81293abccabb tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 66f280de-f963-4f68-818b-ab0534be4c55] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1334.824669] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c7d52801-5d4e-415b-9d6e-81293abccabb tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 66f280de-f963-4f68-818b-ab0534be4c55] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1334.824913] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-23816b9d-74de-433e-a6de-996848710471 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.886286] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c7d52801-5d4e-415b-9d6e-81293abccabb tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 66f280de-f963-4f68-818b-ab0534be4c55] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1334.886531] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c7d52801-5d4e-415b-9d6e-81293abccabb tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 66f280de-f963-4f68-818b-ab0534be4c55] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1334.886720] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7d52801-5d4e-415b-9d6e-81293abccabb tempest-ServersTestJSON-494939627 
tempest-ServersTestJSON-494939627-project-member] Deleting the datastore file [datastore2] 66f280de-f963-4f68-818b-ab0534be4c55 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1334.887043] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-17b78157-e052-4f27-933a-3d960a88f8f3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.893584] env[62627]: DEBUG oslo_vmware.api [None req-c7d52801-5d4e-415b-9d6e-81293abccabb tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1334.893584] env[62627]: value = "task-2195418" [ 1334.893584] env[62627]: _type = "Task" [ 1334.893584] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1334.900860] env[62627]: DEBUG oslo_vmware.api [None req-c7d52801-5d4e-415b-9d6e-81293abccabb tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195418, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1334.946789] env[62627]: DEBUG oslo_concurrency.lockutils [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1334.947065] env[62627]: DEBUG oslo_concurrency.lockutils [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1334.948657] env[62627]: INFO nova.compute.claims [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1334.972031] env[62627]: DEBUG oslo_vmware.api [None req-18f7efbf-9cf2-4974-b494-39571afdde48 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195416, 'name': PowerOffVM_Task, 'duration_secs': 0.209305} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1334.972318] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-18f7efbf-9cf2-4974-b494-39571afdde48 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1334.972485] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-18f7efbf-9cf2-4974-b494-39571afdde48 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1334.972730] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fb0f9059-1b98-4f32-baaf-6d2861060428 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.030430] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-18f7efbf-9cf2-4974-b494-39571afdde48 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1335.030667] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-18f7efbf-9cf2-4974-b494-39571afdde48 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1335.030902] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-18f7efbf-9cf2-4974-b494-39571afdde48 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Deleting the datastore file [datastore2] cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1335.031133] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-98206f46-a8b1-4e48-a948-43c58bcaca46 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.038631] env[62627]: DEBUG oslo_vmware.api [None req-18f7efbf-9cf2-4974-b494-39571afdde48 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Waiting for the task: (returnval){ [ 1335.038631] env[62627]: value = "task-2195420" [ 1335.038631] env[62627]: _type = "Task" [ 1335.038631] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1335.046492] env[62627]: DEBUG oslo_vmware.api [None req-18f7efbf-9cf2-4974-b494-39571afdde48 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195420, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.200319] env[62627]: INFO nova.compute.manager [-] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Took 1.41 seconds to deallocate network for instance. 
[ 1335.403374] env[62627]: DEBUG oslo_vmware.api [None req-c7d52801-5d4e-415b-9d6e-81293abccabb tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195418, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.180089} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1335.403611] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7d52801-5d4e-415b-9d6e-81293abccabb tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1335.403790] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c7d52801-5d4e-415b-9d6e-81293abccabb tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 66f280de-f963-4f68-818b-ab0534be4c55] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1335.403970] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c7d52801-5d4e-415b-9d6e-81293abccabb tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 66f280de-f963-4f68-818b-ab0534be4c55] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1335.404162] env[62627]: INFO nova.compute.manager [None req-c7d52801-5d4e-415b-9d6e-81293abccabb tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 66f280de-f963-4f68-818b-ab0534be4c55] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1335.404403] env[62627]: DEBUG oslo.service.loopingcall [None req-c7d52801-5d4e-415b-9d6e-81293abccabb tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1335.404593] env[62627]: DEBUG nova.compute.manager [-] [instance: 66f280de-f963-4f68-818b-ab0534be4c55] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1335.404688] env[62627]: DEBUG nova.network.neutron [-] [instance: 66f280de-f963-4f68-818b-ab0534be4c55] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1335.550460] env[62627]: DEBUG oslo_vmware.api [None req-18f7efbf-9cf2-4974-b494-39571afdde48 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195420, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.197929} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1335.550797] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-18f7efbf-9cf2-4974-b494-39571afdde48 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1335.550894] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-18f7efbf-9cf2-4974-b494-39571afdde48 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1335.551075] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-18f7efbf-9cf2-4974-b494-39571afdde48 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1335.551257] env[62627]: INFO nova.compute.manager [None req-18f7efbf-9cf2-4974-b494-39571afdde48 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1335.551493] env[62627]: DEBUG oslo.service.loopingcall [None req-18f7efbf-9cf2-4974-b494-39571afdde48 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1335.551683] env[62627]: DEBUG nova.compute.manager [-] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1335.551777] env[62627]: DEBUG nova.network.neutron [-] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1335.746568] env[62627]: INFO nova.compute.manager [None req-2fd33279-741e-4328-b83d-b3ca250d0765 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Took 0.55 seconds to detach 1 volumes for instance. 
[ 1336.054336] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bbccdb6-3214-4726-ad06-95ab5bd00195 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.064440] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61be3cb0-1dd7-4be6-925f-5c045bec359b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.111923] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b1fd1bb-4dcd-49d5-9f1c-a2a810f31930 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.119226] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c529814-820c-438a-9b65-fc4821320a8d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.132363] env[62627]: DEBUG nova.compute.provider_tree [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1336.183536] env[62627]: DEBUG nova.network.neutron [-] [instance: 66f280de-f963-4f68-818b-ab0534be4c55] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1336.208140] env[62627]: DEBUG nova.compute.manager [req-f3c3f543-7a2d-45f9-8205-dd0c6acd89dc req-cd596378-dc51-4135-ba19-f69d9dd67854 service nova] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Received event network-vif-deleted-f766cde0-2056-4245-861e-695b52bcc3b2 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1336.208362] env[62627]: INFO nova.compute.manager [req-f3c3f543-7a2d-45f9-8205-dd0c6acd89dc req-cd596378-dc51-4135-ba19-f69d9dd67854 service nova] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Neutron deleted interface f766cde0-2056-4245-861e-695b52bcc3b2; detaching it from the instance and deleting it from the info cache [ 1336.208612] env[62627]: DEBUG nova.network.neutron [req-f3c3f543-7a2d-45f9-8205-dd0c6acd89dc req-cd596378-dc51-4135-ba19-f69d9dd67854 service nova] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1336.240649] env[62627]: DEBUG nova.compute.manager [req-6827b94d-9215-4723-96c7-6e5b682a0fdd req-ca6647a8-11ca-41ca-99f9-189c387bccdd service nova] [instance: 66f280de-f963-4f68-818b-ab0534be4c55] Received event network-vif-deleted-0aca5f16-116e-4907-b0dc-1f1cd6bc60ef {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1336.253306] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2fd33279-741e-4328-b83d-b3ca250d0765 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1336.635583] 
env[62627]: DEBUG nova.scheduler.client.report [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1336.686029] env[62627]: INFO nova.compute.manager [-] [instance: 66f280de-f963-4f68-818b-ab0534be4c55] Took 1.28 seconds to deallocate network for instance. [ 1336.687481] env[62627]: DEBUG nova.network.neutron [-] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1336.712221] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-37aea2a6-da27-4a72-adff-3fe412223a34 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.721479] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa39c48b-81a7-4195-9274-4351afc12f4b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.749129] env[62627]: DEBUG nova.compute.manager [req-f3c3f543-7a2d-45f9-8205-dd0c6acd89dc req-cd596378-dc51-4135-ba19-f69d9dd67854 service nova] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Detach interface failed, port_id=f766cde0-2056-4245-861e-695b52bcc3b2, reason: Instance cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e could not be found. {{(pid=62627) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1337.140775] env[62627]: DEBUG oslo_concurrency.lockutils [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.193s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1337.140938] env[62627]: DEBUG nova.compute.manager [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Start building networks asynchronously for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1337.144259] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2fd33279-741e-4328-b83d-b3ca250d0765 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.891s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1337.144545] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2fd33279-741e-4328-b83d-b3ca250d0765 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1337.169124] env[62627]: INFO nova.scheduler.client.report [None req-2fd33279-741e-4328-b83d-b3ca250d0765 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Deleted allocations for instance 1593ca50-bc97-464a-86ca-17995db15758 [ 1337.192160] env[62627]: INFO nova.compute.manager [-] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Took 1.64 seconds to deallocate network for instance. [ 1337.197966] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c7d52801-5d4e-415b-9d6e-81293abccabb tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1337.198507] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c7d52801-5d4e-415b-9d6e-81293abccabb tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1337.199224] env[62627]: DEBUG nova.objects.instance [None req-c7d52801-5d4e-415b-9d6e-81293abccabb tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lazy-loading 'resources' on Instance uuid 66f280de-f963-4f68-818b-ab0534be4c55 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1337.649758] env[62627]: DEBUG nova.compute.utils [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1337.651531] env[62627]: DEBUG nova.compute.manager [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Allocating IP information in the background. 
{{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1337.651719] env[62627]: DEBUG nova.network.neutron [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1337.677147] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2fd33279-741e-4328-b83d-b3ca250d0765 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lock "1593ca50-bc97-464a-86ca-17995db15758" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.674s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1337.698550] env[62627]: DEBUG nova.policy [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e72f59cc4a02460088e86ad29af1e0ff', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '97c11d791dcf4f94a8e8bda7a6f014cd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 1337.703869] env[62627]: DEBUG oslo_concurrency.lockutils [None req-18f7efbf-9cf2-4974-b494-39571afdde48 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1337.797289] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18233439-f537-40ba-a29d-05aef1682d9e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.805030] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17f2b82f-1d79-4ede-9550-8ca2fe9e17a7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.837627] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91710197-a895-480a-9349-1565f99f17ce {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.844754] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04ebfac9-e345-4eea-9ff0-3d05a883e319 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.858089] env[62627]: DEBUG nova.compute.provider_tree [None req-c7d52801-5d4e-415b-9d6e-81293abccabb tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1338.001022] env[62627]: DEBUG 
nova.network.neutron [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Successfully created port: 4cbd817b-c3ef-4a33-8648-4297830602e0 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1338.154589] env[62627]: DEBUG nova.compute.manager [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1338.322317] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquiring lock "8c170084-a7f8-4774-8f9d-7874103b0f4f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1338.322544] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lock "8c170084-a7f8-4774-8f9d-7874103b0f4f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1338.361458] env[62627]: DEBUG nova.scheduler.client.report [None req-c7d52801-5d4e-415b-9d6e-81293abccabb tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1338.824987] env[62627]: DEBUG nova.compute.manager [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Starting instance... 
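
Editor's note: the recurring "Inventory has not changed for provider ..." entries echo the inventory record this compute node reports to Placement. A small sketch of that payload as a plain dict, with every value copied from the entries in this log; only the variable names are added here:

    # Inventory reported for provider c17e7f32-68d8-4e2f-8979-15d701de02f7.
    inventory = {
        "VCPU": {"total": 48, "reserved": 0, "min_unit": 1, "max_unit": 16,
                 "step_size": 1, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "min_unit": 1,
                      "max_unit": 65530, "step_size": 1, "allocation_ratio": 1.0},
        "DISK_GB": {"total": 400, "reserved": 0, "min_unit": 1, "max_unit": 177,
                    "step_size": 1, "allocation_ratio": 1.0},
    }

    # Schedulable capacity per resource class is (total - reserved) * allocation_ratio,
    # e.g. 48 VCPU * 4.0 = 192 schedulable vCPUs on this node.
    vcpu = inventory["VCPU"]
    schedulable_vcpus = (vcpu["total"] - vcpu["reserved"]) * vcpu["allocation_ratio"]
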
{{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1338.866126] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c7d52801-5d4e-415b-9d6e-81293abccabb tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.668s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1338.869523] env[62627]: DEBUG oslo_concurrency.lockutils [None req-18f7efbf-9cf2-4974-b494-39571afdde48 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.165s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1338.869523] env[62627]: DEBUG nova.objects.instance [None req-18f7efbf-9cf2-4974-b494-39571afdde48 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lazy-loading 'resources' on Instance uuid cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1338.885147] env[62627]: INFO nova.scheduler.client.report [None req-c7d52801-5d4e-415b-9d6e-81293abccabb tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Deleted allocations for instance 66f280de-f963-4f68-818b-ab0534be4c55 [ 1339.163689] env[62627]: DEBUG nova.compute.manager [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Start spawning the instance on the hypervisor. 
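
Editor's note: the nova.virt.hardware entries that follow walk through CPU-topology selection for the m1.nano flavor: with no flavor or image limits the maximums default to 65536 sockets/cores/threads, and a single vCPU admits exactly one topology, 1 socket x 1 core x 1 thread. The helper below is an illustration of that arithmetic only, not Nova's implementation:

    import itertools

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Yield (sockets, cores, threads) triples whose product equals the vCPU count."""
        for s, c, t in itertools.product(range(1, min(vcpus, max_sockets) + 1),
                                         range(1, min(vcpus, max_cores) + 1),
                                         range(1, min(vcpus, max_threads) + 1)):
            if s * c * t == vcpus:
                yield s, c, t

    # m1.nano has vcpus=1, so exactly one candidate survives, matching
    # "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]"
    # in the entries that follow.
    print(list(possible_topologies(1)))   # [(1, 1, 1)]
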
{{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1339.190443] env[62627]: DEBUG nova.virt.hardware [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1339.190690] env[62627]: DEBUG nova.virt.hardware [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1339.190847] env[62627]: DEBUG nova.virt.hardware [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1339.191036] env[62627]: DEBUG nova.virt.hardware [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1339.191218] env[62627]: DEBUG nova.virt.hardware [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1339.191446] env[62627]: DEBUG nova.virt.hardware [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1339.191676] env[62627]: DEBUG nova.virt.hardware [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1339.191839] env[62627]: DEBUG nova.virt.hardware [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1339.192013] env[62627]: DEBUG nova.virt.hardware [None 
req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1339.192186] env[62627]: DEBUG nova.virt.hardware [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1339.192363] env[62627]: DEBUG nova.virt.hardware [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1339.193216] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26dbdbd5-a52a-42ec-9b44-bf65a253b1a6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.201236] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03886f1b-5fd9-479e-b35d-182aa884b949 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.345145] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1339.386665] env[62627]: DEBUG nova.compute.manager [req-fea75cb8-6176-4b2e-9ef3-63893d908c00 req-2b29e33d-2e5d-4435-a3ae-91548bce7446 service nova] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Received event network-vif-plugged-4cbd817b-c3ef-4a33-8648-4297830602e0 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1339.386887] env[62627]: DEBUG oslo_concurrency.lockutils [req-fea75cb8-6176-4b2e-9ef3-63893d908c00 req-2b29e33d-2e5d-4435-a3ae-91548bce7446 service nova] Acquiring lock "16ed0637-d24d-488d-b9ba-dd5a49a7b2a2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1339.387136] env[62627]: DEBUG oslo_concurrency.lockutils [req-fea75cb8-6176-4b2e-9ef3-63893d908c00 req-2b29e33d-2e5d-4435-a3ae-91548bce7446 service nova] Lock "16ed0637-d24d-488d-b9ba-dd5a49a7b2a2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1339.387333] env[62627]: DEBUG oslo_concurrency.lockutils [req-fea75cb8-6176-4b2e-9ef3-63893d908c00 req-2b29e33d-2e5d-4435-a3ae-91548bce7446 service nova] Lock "16ed0637-d24d-488d-b9ba-dd5a49a7b2a2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1339.387507] env[62627]: DEBUG nova.compute.manager 
[req-fea75cb8-6176-4b2e-9ef3-63893d908c00 req-2b29e33d-2e5d-4435-a3ae-91548bce7446 service nova] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] No waiting events found dispatching network-vif-plugged-4cbd817b-c3ef-4a33-8648-4297830602e0 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1339.387677] env[62627]: WARNING nova.compute.manager [req-fea75cb8-6176-4b2e-9ef3-63893d908c00 req-2b29e33d-2e5d-4435-a3ae-91548bce7446 service nova] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Received unexpected event network-vif-plugged-4cbd817b-c3ef-4a33-8648-4297830602e0 for instance with vm_state building and task_state spawning. [ 1339.393532] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c7d52801-5d4e-415b-9d6e-81293abccabb tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "66f280de-f963-4f68-818b-ab0534be4c55" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.601s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1339.465532] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d76be742-9c96-411c-ae95-16cddd48d35d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.473848] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4816718-0263-4fd9-ad4f-3e11e360681a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.482583] env[62627]: DEBUG nova.network.neutron [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Successfully updated port: 4cbd817b-c3ef-4a33-8648-4297830602e0 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1339.513729] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be0d603f-17be-419c-a969-c4aedbf4b542 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.521596] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a82542d-4810-44dc-81da-71a3fc45ddde {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.534981] env[62627]: DEBUG nova.compute.provider_tree [None req-18f7efbf-9cf2-4974-b494-39571afdde48 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1339.988410] env[62627]: DEBUG oslo_concurrency.lockutils [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquiring lock "refresh_cache-16ed0637-d24d-488d-b9ba-dd5a49a7b2a2" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1339.988663] env[62627]: DEBUG oslo_concurrency.lockutils [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 
tempest-ServerActionsTestJSON-1886905081-project-member] Acquired lock "refresh_cache-16ed0637-d24d-488d-b9ba-dd5a49a7b2a2" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1339.988800] env[62627]: DEBUG nova.network.neutron [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1340.037748] env[62627]: DEBUG nova.scheduler.client.report [None req-18f7efbf-9cf2-4974-b494-39571afdde48 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1340.522113] env[62627]: DEBUG nova.network.neutron [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1340.542614] env[62627]: DEBUG oslo_concurrency.lockutils [None req-18f7efbf-9cf2-4974-b494-39571afdde48 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.674s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1340.545062] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.200s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1340.546032] env[62627]: INFO nova.compute.claims [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1340.558480] env[62627]: INFO nova.scheduler.client.report [None req-18f7efbf-9cf2-4974-b494-39571afdde48 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Deleted allocations for instance cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e [ 1340.649774] env[62627]: DEBUG nova.network.neutron [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Updating instance_info_cache with network_info: [{"id": "4cbd817b-c3ef-4a33-8648-4297830602e0", "address": 
"fa:16:3e:e3:f3:a5", "network": {"id": "da26ed97-a6a7-47f1-b6c0-d1b2f45ad202", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-265963183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97c11d791dcf4f94a8e8bda7a6f014cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6eaa481-1f92-4851-b98e-09ed0daad7cb", "external-id": "nsx-vlan-transportzone-636", "segmentation_id": 636, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4cbd817b-c3", "ovs_interfaceid": "4cbd817b-c3ef-4a33-8648-4297830602e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1340.810621] env[62627]: DEBUG oslo_concurrency.lockutils [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquiring lock "3b1e8b81-7d18-498a-b35e-8ab1c87374c8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1340.810883] env[62627]: DEBUG oslo_concurrency.lockutils [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "3b1e8b81-7d18-498a-b35e-8ab1c87374c8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1341.066567] env[62627]: DEBUG oslo_concurrency.lockutils [None req-18f7efbf-9cf2-4974-b494-39571afdde48 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lock "cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.128s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1341.152794] env[62627]: DEBUG oslo_concurrency.lockutils [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Releasing lock "refresh_cache-16ed0637-d24d-488d-b9ba-dd5a49a7b2a2" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1341.153121] env[62627]: DEBUG nova.compute.manager [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Instance network_info: |[{"id": "4cbd817b-c3ef-4a33-8648-4297830602e0", "address": "fa:16:3e:e3:f3:a5", "network": {"id": "da26ed97-a6a7-47f1-b6c0-d1b2f45ad202", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-265963183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": 
"192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97c11d791dcf4f94a8e8bda7a6f014cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6eaa481-1f92-4851-b98e-09ed0daad7cb", "external-id": "nsx-vlan-transportzone-636", "segmentation_id": 636, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4cbd817b-c3", "ovs_interfaceid": "4cbd817b-c3ef-4a33-8648-4297830602e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1341.153563] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e3:f3:a5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c6eaa481-1f92-4851-b98e-09ed0daad7cb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4cbd817b-c3ef-4a33-8648-4297830602e0', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1341.161104] env[62627]: DEBUG oslo.service.loopingcall [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1341.161326] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1341.161561] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ed778aa0-c4ed-43b5-b693-eee9354f4273 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.181008] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1341.181008] env[62627]: value = "task-2195421" [ 1341.181008] env[62627]: _type = "Task" [ 1341.181008] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1341.191498] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195421, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1341.313748] env[62627]: DEBUG nova.compute.manager [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 3b1e8b81-7d18-498a-b35e-8ab1c87374c8] Starting instance... 
{{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1341.413936] env[62627]: DEBUG nova.compute.manager [req-c015661e-f1ba-4f45-a4ec-6d893d0ed899 req-66ffcb97-c492-4277-9b96-c9b780afd808 service nova] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Received event network-changed-4cbd817b-c3ef-4a33-8648-4297830602e0 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1341.414155] env[62627]: DEBUG nova.compute.manager [req-c015661e-f1ba-4f45-a4ec-6d893d0ed899 req-66ffcb97-c492-4277-9b96-c9b780afd808 service nova] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Refreshing instance network info cache due to event network-changed-4cbd817b-c3ef-4a33-8648-4297830602e0. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1341.414385] env[62627]: DEBUG oslo_concurrency.lockutils [req-c015661e-f1ba-4f45-a4ec-6d893d0ed899 req-66ffcb97-c492-4277-9b96-c9b780afd808 service nova] Acquiring lock "refresh_cache-16ed0637-d24d-488d-b9ba-dd5a49a7b2a2" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1341.414637] env[62627]: DEBUG oslo_concurrency.lockutils [req-c015661e-f1ba-4f45-a4ec-6d893d0ed899 req-66ffcb97-c492-4277-9b96-c9b780afd808 service nova] Acquired lock "refresh_cache-16ed0637-d24d-488d-b9ba-dd5a49a7b2a2" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1341.414878] env[62627]: DEBUG nova.network.neutron [req-c015661e-f1ba-4f45-a4ec-6d893d0ed899 req-66ffcb97-c492-4277-9b96-c9b780afd808 service nova] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Refreshing network info cache for port 4cbd817b-c3ef-4a33-8648-4297830602e0 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1341.645347] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c30c2485-aebc-445b-ad45-04c807e46db8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.654604] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08fc5b1b-30c8-4f69-b43c-2b3ece2b2140 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.693314] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8460bcca-a90b-43fa-89ad-f2a46489c320 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.702981] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4d47a29-3f57-45a1-8ba0-37c541d4d48a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.706395] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195421, 'name': CreateVM_Task, 'duration_secs': 0.284202} completed successfully. 
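
Editor's note: the "Waiting for the task: (returnval){ value = "task-2195421" ... }" entry followed by "CreateVM_Task ... completed successfully" is oslo.vmware's task-polling loop at work. A hedged sketch of driving a vCenter task the same way; the endpoint, credentials and the folder/config/pool references are placeholders, not values from this log:

    from oslo_vmware import api

    # Placeholder endpoint and credentials; create_session=False avoids
    # connecting at construction time in this sketch.
    session = api.VMwareAPISession("vcenter.example.org", "admin", "secret",
                                   api_retry_count=10, task_poll_interval=0.5,
                                   create_session=False)

    # invoke_api() issues the SOAP call; a *_Task method returns a Task
    # reference, and wait_for_task() polls it (the "progress is 0%" /
    # "completed successfully" entries) until it finishes.
    # task = session.invoke_api(session.vim, "CreateVM_Task", folder_ref,
    #                           config=vm_config_spec, pool=resource_pool_ref)
    # result = session.wait_for_task(task)
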
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1341.706554] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1341.707478] env[62627]: DEBUG oslo_concurrency.lockutils [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1341.707644] env[62627]: DEBUG oslo_concurrency.lockutils [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1341.708010] env[62627]: DEBUG oslo_concurrency.lockutils [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1341.708186] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1084ce50-0e77-40c1-a971-e9483fc2d671 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.716945] env[62627]: DEBUG nova.compute.provider_tree [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1341.723190] env[62627]: DEBUG oslo_vmware.api [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 1341.723190] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52bdbaf4-431e-55fb-d0be-279c6d25c089" [ 1341.723190] env[62627]: _type = "Task" [ 1341.723190] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1341.730300] env[62627]: DEBUG oslo_vmware.api [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52bdbaf4-431e-55fb-d0be-279c6d25c089, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1341.831558] env[62627]: DEBUG oslo_concurrency.lockutils [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1342.146246] env[62627]: DEBUG nova.network.neutron [req-c015661e-f1ba-4f45-a4ec-6d893d0ed899 req-66ffcb97-c492-4277-9b96-c9b780afd808 service nova] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Updated VIF entry in instance network info cache for port 4cbd817b-c3ef-4a33-8648-4297830602e0. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1342.146616] env[62627]: DEBUG nova.network.neutron [req-c015661e-f1ba-4f45-a4ec-6d893d0ed899 req-66ffcb97-c492-4277-9b96-c9b780afd808 service nova] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Updating instance_info_cache with network_info: [{"id": "4cbd817b-c3ef-4a33-8648-4297830602e0", "address": "fa:16:3e:e3:f3:a5", "network": {"id": "da26ed97-a6a7-47f1-b6c0-d1b2f45ad202", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-265963183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97c11d791dcf4f94a8e8bda7a6f014cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6eaa481-1f92-4851-b98e-09ed0daad7cb", "external-id": "nsx-vlan-transportzone-636", "segmentation_id": 636, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4cbd817b-c3", "ovs_interfaceid": "4cbd817b-c3ef-4a33-8648-4297830602e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1342.220064] env[62627]: DEBUG nova.scheduler.client.report [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1342.233382] env[62627]: DEBUG oslo_vmware.api [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52bdbaf4-431e-55fb-d0be-279c6d25c089, 'name': SearchDatastore_Task, 'duration_secs': 0.010758} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1342.233666] env[62627]: DEBUG oslo_concurrency.lockutils [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1342.233886] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1342.234132] env[62627]: DEBUG oslo_concurrency.lockutils [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1342.234281] env[62627]: DEBUG oslo_concurrency.lockutils [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1342.234512] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1342.235278] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c9e37f5a-a1bf-4e34-8854-ad09ae5fd9cc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.243805] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1342.243984] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1342.245030] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8c31dc8b-1c1c-4386-94a6-5df72464a8fc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.252582] env[62627]: DEBUG oslo_vmware.api [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 1342.252582] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]529d68dc-a9d0-6108-38b5-1f7968a17dc0" [ 1342.252582] env[62627]: _type = "Task" [ 1342.252582] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1342.260101] env[62627]: DEBUG oslo_vmware.api [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]529d68dc-a9d0-6108-38b5-1f7968a17dc0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1342.649843] env[62627]: DEBUG oslo_concurrency.lockutils [req-c015661e-f1ba-4f45-a4ec-6d893d0ed899 req-66ffcb97-c492-4277-9b96-c9b780afd808 service nova] Releasing lock "refresh_cache-16ed0637-d24d-488d-b9ba-dd5a49a7b2a2" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1342.725209] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.181s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1342.725770] env[62627]: DEBUG nova.compute.manager [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Start building networks asynchronously for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1342.728501] env[62627]: DEBUG oslo_concurrency.lockutils [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.897s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1342.729915] env[62627]: INFO nova.compute.claims [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 3b1e8b81-7d18-498a-b35e-8ab1c87374c8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1342.763807] env[62627]: DEBUG oslo_vmware.api [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]529d68dc-a9d0-6108-38b5-1f7968a17dc0, 'name': SearchDatastore_Task, 'duration_secs': 0.008546} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1342.765578] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-de20e1b3-dec3-4de6-bcd5-6d1f5d395614 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.771044] env[62627]: DEBUG oslo_vmware.api [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 1342.771044] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52e29761-4699-1276-b304-36d07b2cee86" [ 1342.771044] env[62627]: _type = "Task" [ 1342.771044] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1342.778212] env[62627]: DEBUG oslo_vmware.api [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52e29761-4699-1276-b304-36d07b2cee86, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1343.144627] env[62627]: DEBUG oslo_concurrency.lockutils [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Acquiring lock "6c4ebb14-c3a3-485c-bbb3-405a8b7f92db" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1343.144848] env[62627]: DEBUG oslo_concurrency.lockutils [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lock "6c4ebb14-c3a3-485c-bbb3-405a8b7f92db" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1343.234433] env[62627]: DEBUG nova.compute.utils [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1343.237523] env[62627]: DEBUG nova.compute.manager [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Allocating IP information in the background. {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1343.237689] env[62627]: DEBUG nova.network.neutron [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1343.282144] env[62627]: DEBUG oslo_vmware.api [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52e29761-4699-1276-b304-36d07b2cee86, 'name': SearchDatastore_Task, 'duration_secs': 0.013197} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1343.282468] env[62627]: DEBUG oslo_concurrency.lockutils [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1343.282660] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2/16ed0637-d24d-488d-b9ba-dd5a49a7b2a2.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1343.284053] env[62627]: DEBUG nova.policy [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bc7ce5392ecd486b962c3db9d43181e1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a9f3a36c557145e38c528a628b1f3b67', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 1343.285501] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-715b6ac5-7b2a-4334-b8db-aac801f99c6f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.292316] env[62627]: DEBUG oslo_vmware.api [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 1343.292316] env[62627]: value = "task-2195422" [ 1343.292316] env[62627]: _type = "Task" [ 1343.292316] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1343.300009] env[62627]: DEBUG oslo_vmware.api [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195422, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1343.592599] env[62627]: DEBUG nova.network.neutron [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Successfully created port: 71c1451f-de1d-4700-8326-282bc39bb6f7 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1343.647436] env[62627]: DEBUG nova.compute.manager [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1343.738376] env[62627]: DEBUG nova.compute.manager [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1343.802660] env[62627]: DEBUG oslo_vmware.api [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195422, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.453917} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1343.802906] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2/16ed0637-d24d-488d-b9ba-dd5a49a7b2a2.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1343.803129] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1343.803380] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-36363d4b-5381-491e-ac01-6b5cbf50c11c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.811028] env[62627]: DEBUG oslo_vmware.api [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 1343.811028] env[62627]: value = "task-2195423" [ 1343.811028] env[62627]: _type = "Task" [ 1343.811028] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1343.821342] env[62627]: DEBUG oslo_vmware.api [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195423, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1343.854178] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbb03b7f-8f77-4380-a0d4-8b073bc7f320 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.861435] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-544eab00-70b6-40ba-82e2-e06f0706d246 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.891695] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3db56df6-2346-4e6f-a5bd-4d7526da1f49 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.898678] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6947637c-30b8-43b6-bdb6-98e7279501c2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.911693] env[62627]: DEBUG nova.compute.provider_tree [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1344.167089] env[62627]: DEBUG oslo_concurrency.lockutils [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1344.322845] env[62627]: DEBUG oslo_vmware.api [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195423, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069178} completed successfully. 
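
Editor's note: the CopyVirtualDisk_Task / ExtendVirtualDisk_Task sequence above copies the cached image VMDK into the instance's own datastore folder and then grows the root disk to the flavor's root_gb of 1 GiB (logged as "Extending root virtual disk to 1048576", i.e. KiB). A small sketch of the path convention those entries follow; only the variable names are added, the datastore, image id and instance uuid are the ones in the log:

    image_id = "d5497cf1-16e8-478a-a834-e9289b1751b7"
    instance_uuid = "16ed0637-d24d-488d-b9ba-dd5a49a7b2a2"
    datastore = "datastore2"

    # Per-datastore cache of the sparse Glance image:
    cache_vmdk = f"[{datastore}] devstack-image-cache_base/{image_id}/{image_id}.vmdk"

    # Per-instance copy that becomes the root disk, then extended to root_gb:
    instance_vmdk = f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"
    root_disk_kib = 1 * 1024 * 1024   # 1 GiB, matching the ExtendVirtualDisk_Task above
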
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1344.323827] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1344.324478] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e2288a1-e8bc-4b76-8f0e-ed35eacfb455 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.346377] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Reconfiguring VM instance instance-00000070 to attach disk [datastore2] 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2/16ed0637-d24d-488d-b9ba-dd5a49a7b2a2.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1344.346639] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-981abc1b-ba4c-4b17-8875-c62dc9110f10 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.365151] env[62627]: DEBUG oslo_vmware.api [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 1344.365151] env[62627]: value = "task-2195424" [ 1344.365151] env[62627]: _type = "Task" [ 1344.365151] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1344.374069] env[62627]: DEBUG oslo_vmware.api [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195424, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.415249] env[62627]: DEBUG nova.scheduler.client.report [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1344.752530] env[62627]: DEBUG nova.compute.manager [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Start spawning the instance on the hypervisor. 
{{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1344.777698] env[62627]: DEBUG nova.virt.hardware [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1344.777933] env[62627]: DEBUG nova.virt.hardware [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1344.778099] env[62627]: DEBUG nova.virt.hardware [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1344.778284] env[62627]: DEBUG nova.virt.hardware [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1344.778468] env[62627]: DEBUG nova.virt.hardware [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1344.778620] env[62627]: DEBUG nova.virt.hardware [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1344.778825] env[62627]: DEBUG nova.virt.hardware [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1344.778980] env[62627]: DEBUG nova.virt.hardware [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1344.779199] env[62627]: DEBUG 
nova.virt.hardware [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1344.779373] env[62627]: DEBUG nova.virt.hardware [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1344.779546] env[62627]: DEBUG nova.virt.hardware [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1344.780485] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14d0efe0-1ce8-4eb3-88e9-39dd5836ddfc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.788298] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58999273-6723-43e1-889b-5a4e670f6c65 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.875664] env[62627]: DEBUG oslo_vmware.api [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195424, 'name': ReconfigVM_Task, 'duration_secs': 0.259614} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1344.875955] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Reconfigured VM instance instance-00000070 to attach disk [datastore2] 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2/16ed0637-d24d-488d-b9ba-dd5a49a7b2a2.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1344.876749] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-279b35f8-eb9f-4825-873d-512acce072f6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.884362] env[62627]: DEBUG oslo_vmware.api [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 1344.884362] env[62627]: value = "task-2195425" [ 1344.884362] env[62627]: _type = "Task" [ 1344.884362] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1344.891915] env[62627]: DEBUG oslo_vmware.api [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195425, 'name': Rename_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.919875] env[62627]: DEBUG oslo_concurrency.lockutils [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.191s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1344.920388] env[62627]: DEBUG nova.compute.manager [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 3b1e8b81-7d18-498a-b35e-8ab1c87374c8] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1344.922908] env[62627]: DEBUG oslo_concurrency.lockutils [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.756s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1344.924330] env[62627]: INFO nova.compute.claims [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1344.988357] env[62627]: DEBUG nova.compute.manager [req-8f19bbb8-6a37-49da-ab6f-f1b27e728ec8 req-03fe4d07-185f-46df-a7fb-ee27b5408187 service nova] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Received event network-vif-plugged-71c1451f-de1d-4700-8326-282bc39bb6f7 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1344.988568] env[62627]: DEBUG oslo_concurrency.lockutils [req-8f19bbb8-6a37-49da-ab6f-f1b27e728ec8 req-03fe4d07-185f-46df-a7fb-ee27b5408187 service nova] Acquiring lock "8c170084-a7f8-4774-8f9d-7874103b0f4f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1344.988769] env[62627]: DEBUG oslo_concurrency.lockutils [req-8f19bbb8-6a37-49da-ab6f-f1b27e728ec8 req-03fe4d07-185f-46df-a7fb-ee27b5408187 service nova] Lock "8c170084-a7f8-4774-8f9d-7874103b0f4f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1344.988932] env[62627]: DEBUG oslo_concurrency.lockutils [req-8f19bbb8-6a37-49da-ab6f-f1b27e728ec8 req-03fe4d07-185f-46df-a7fb-ee27b5408187 service nova] Lock "8c170084-a7f8-4774-8f9d-7874103b0f4f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1344.989113] env[62627]: DEBUG nova.compute.manager [req-8f19bbb8-6a37-49da-ab6f-f1b27e728ec8 req-03fe4d07-185f-46df-a7fb-ee27b5408187 service nova] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] No waiting events found dispatching network-vif-plugged-71c1451f-de1d-4700-8326-282bc39bb6f7 {{(pid=62627) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 1344.989330] env[62627]: WARNING nova.compute.manager [req-8f19bbb8-6a37-49da-ab6f-f1b27e728ec8 req-03fe4d07-185f-46df-a7fb-ee27b5408187 service nova] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Received unexpected event network-vif-plugged-71c1451f-de1d-4700-8326-282bc39bb6f7 for instance with vm_state building and task_state spawning. [ 1345.074561] env[62627]: DEBUG nova.network.neutron [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Successfully updated port: 71c1451f-de1d-4700-8326-282bc39bb6f7 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1345.394166] env[62627]: DEBUG oslo_vmware.api [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195425, 'name': Rename_Task, 'duration_secs': 0.227446} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1345.394504] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1345.394679] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7afd312e-8362-428c-9ff7-3388017dc1b8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.401325] env[62627]: DEBUG oslo_vmware.api [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 1345.401325] env[62627]: value = "task-2195426" [ 1345.401325] env[62627]: _type = "Task" [ 1345.401325] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1345.408455] env[62627]: DEBUG oslo_vmware.api [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195426, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1345.428893] env[62627]: DEBUG nova.compute.utils [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1345.432414] env[62627]: DEBUG nova.compute.manager [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 3b1e8b81-7d18-498a-b35e-8ab1c87374c8] Allocating IP information in the background. 
{{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1345.432560] env[62627]: DEBUG nova.network.neutron [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 3b1e8b81-7d18-498a-b35e-8ab1c87374c8] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1345.481604] env[62627]: DEBUG nova.policy [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd847ab3ec20743d0997defea8008448e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '710e7463994b4ec08bcc6d479cd1138b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 1345.577469] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquiring lock "refresh_cache-8c170084-a7f8-4774-8f9d-7874103b0f4f" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1345.577664] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquired lock "refresh_cache-8c170084-a7f8-4774-8f9d-7874103b0f4f" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1345.577833] env[62627]: DEBUG nova.network.neutron [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1345.742354] env[62627]: DEBUG nova.network.neutron [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 3b1e8b81-7d18-498a-b35e-8ab1c87374c8] Successfully created port: f488b8ce-9270-489a-862c-8166c83c1bdc {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1345.910585] env[62627]: DEBUG oslo_vmware.api [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195426, 'name': PowerOnVM_Task, 'duration_secs': 0.431826} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1345.910845] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1345.911061] env[62627]: INFO nova.compute.manager [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Took 6.75 seconds to spawn the instance on the hypervisor. [ 1345.911249] env[62627]: DEBUG nova.compute.manager [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1345.911985] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b2e8e80-cb50-4274-bf12-342957a1788b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.935267] env[62627]: DEBUG nova.compute.manager [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 3b1e8b81-7d18-498a-b35e-8ab1c87374c8] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1346.046484] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4364ce66-9d74-4631-a555-e319d9972d08 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.053848] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a042cf98-dddb-4e67-bd24-13cb62dfc571 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.086387] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8a941d4-2c38-44e6-9d56-5805cf76a835 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.094587] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0167b5d3-0446-4c8a-a708-68a4ce5f6928 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.108112] env[62627]: DEBUG nova.compute.provider_tree [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1346.119086] env[62627]: DEBUG nova.network.neutron [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Instance cache missing network info. 
{{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1346.241750] env[62627]: DEBUG nova.network.neutron [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Updating instance_info_cache with network_info: [{"id": "71c1451f-de1d-4700-8326-282bc39bb6f7", "address": "fa:16:3e:fb:be:f1", "network": {"id": "e5d8177b-3afb-46cb-88d9-d4777ffa4a39", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1184692155-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f3a36c557145e38c528a628b1f3b67", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a44713-0af1-486e-bc0d-00e03a769fa4", "external-id": "nsx-vlan-transportzone-420", "segmentation_id": 420, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71c1451f-de", "ovs_interfaceid": "71c1451f-de1d-4700-8326-282bc39bb6f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1346.428446] env[62627]: INFO nova.compute.manager [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Took 11.50 seconds to build instance. 
[ 1346.612020] env[62627]: DEBUG nova.scheduler.client.report [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1346.670892] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1346.671105] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1346.671254] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Starting heal instance info cache {{(pid=62627) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 1346.744779] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Releasing lock "refresh_cache-8c170084-a7f8-4774-8f9d-7874103b0f4f" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1346.745085] env[62627]: DEBUG nova.compute.manager [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Instance network_info: |[{"id": "71c1451f-de1d-4700-8326-282bc39bb6f7", "address": "fa:16:3e:fb:be:f1", "network": {"id": "e5d8177b-3afb-46cb-88d9-d4777ffa4a39", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1184692155-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f3a36c557145e38c528a628b1f3b67", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a44713-0af1-486e-bc0d-00e03a769fa4", "external-id": "nsx-vlan-transportzone-420", "segmentation_id": 420, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71c1451f-de", "ovs_interfaceid": "71c1451f-de1d-4700-8326-282bc39bb6f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1346.745462] env[62627]: DEBUG 
nova.virt.vmwareapi.vmops [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fb:be:f1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e7a44713-0af1-486e-bc0d-00e03a769fa4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '71c1451f-de1d-4700-8326-282bc39bb6f7', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1346.754824] env[62627]: DEBUG oslo.service.loopingcall [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1346.755316] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1346.755591] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ba78393c-1f31-4122-ace3-1ec22485fcc9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.775527] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1346.775527] env[62627]: value = "task-2195427" [ 1346.775527] env[62627]: _type = "Task" [ 1346.775527] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.782962] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195427, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.930427] env[62627]: DEBUG oslo_concurrency.lockutils [None req-07be5e9d-0f04-4813-b9a6-f5fa6290cc28 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lock "16ed0637-d24d-488d-b9ba-dd5a49a7b2a2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.010s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1346.948460] env[62627]: DEBUG nova.compute.manager [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 3b1e8b81-7d18-498a-b35e-8ab1c87374c8] Start spawning the instance on the hypervisor. 
{{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1346.975973] env[62627]: DEBUG nova.virt.hardware [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1346.976274] env[62627]: DEBUG nova.virt.hardware [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1346.976466] env[62627]: DEBUG nova.virt.hardware [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1346.976688] env[62627]: DEBUG nova.virt.hardware [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1346.976861] env[62627]: DEBUG nova.virt.hardware [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1346.977039] env[62627]: DEBUG nova.virt.hardware [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1346.977264] env[62627]: DEBUG nova.virt.hardware [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1346.977420] env[62627]: DEBUG nova.virt.hardware [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1346.977588] env[62627]: DEBUG nova.virt.hardware [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 
tempest-ServersTestJSON-494939627-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1346.977752] env[62627]: DEBUG nova.virt.hardware [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1346.977925] env[62627]: DEBUG nova.virt.hardware [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1346.978929] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ed1102f-1d2b-453e-8b4b-9d69a64b78e8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.987735] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33a943ac-0f32-4213-9a9f-30b27bf12b24 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.018049] env[62627]: DEBUG nova.compute.manager [req-71325358-cd08-4c19-8d77-b85a40ea7715 req-8525834c-c9bd-48e1-bbee-fa305a184844 service nova] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Received event network-changed-71c1451f-de1d-4700-8326-282bc39bb6f7 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1347.018259] env[62627]: DEBUG nova.compute.manager [req-71325358-cd08-4c19-8d77-b85a40ea7715 req-8525834c-c9bd-48e1-bbee-fa305a184844 service nova] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Refreshing instance network info cache due to event network-changed-71c1451f-de1d-4700-8326-282bc39bb6f7. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1347.018479] env[62627]: DEBUG oslo_concurrency.lockutils [req-71325358-cd08-4c19-8d77-b85a40ea7715 req-8525834c-c9bd-48e1-bbee-fa305a184844 service nova] Acquiring lock "refresh_cache-8c170084-a7f8-4774-8f9d-7874103b0f4f" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1347.018624] env[62627]: DEBUG oslo_concurrency.lockutils [req-71325358-cd08-4c19-8d77-b85a40ea7715 req-8525834c-c9bd-48e1-bbee-fa305a184844 service nova] Acquired lock "refresh_cache-8c170084-a7f8-4774-8f9d-7874103b0f4f" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1347.018786] env[62627]: DEBUG nova.network.neutron [req-71325358-cd08-4c19-8d77-b85a40ea7715 req-8525834c-c9bd-48e1-bbee-fa305a184844 service nova] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Refreshing network info cache for port 71c1451f-de1d-4700-8326-282bc39bb6f7 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1347.116441] env[62627]: DEBUG oslo_concurrency.lockutils [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.193s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1347.116942] env[62627]: DEBUG nova.compute.manager [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1347.286314] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195427, 'name': CreateVM_Task, 'duration_secs': 0.29306} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1347.286494] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1347.287296] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1347.287521] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1347.287848] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1347.288150] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e3cf163b-2854-4632-bd67-9052453c6dd3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.292669] env[62627]: DEBUG oslo_vmware.api [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1347.292669] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52851654-1565-58f8-9dc2-1450a9549aa9" [ 1347.292669] env[62627]: _type = "Task" [ 1347.292669] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1347.293430] env[62627]: DEBUG nova.network.neutron [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 3b1e8b81-7d18-498a-b35e-8ab1c87374c8] Successfully updated port: f488b8ce-9270-489a-862c-8166c83c1bdc {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1347.302571] env[62627]: DEBUG oslo_vmware.api [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52851654-1565-58f8-9dc2-1450a9549aa9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.623462] env[62627]: DEBUG nova.compute.utils [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1347.624878] env[62627]: DEBUG nova.compute.manager [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Allocating IP information in the background. {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1347.625055] env[62627]: DEBUG nova.network.neutron [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1347.664686] env[62627]: DEBUG nova.policy [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b8960d0a8ce744e8b0536074751e9d0d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f41b0c171180469e9092b8a8da697a78', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 1347.724685] env[62627]: DEBUG nova.network.neutron [req-71325358-cd08-4c19-8d77-b85a40ea7715 req-8525834c-c9bd-48e1-bbee-fa305a184844 service nova] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Updated VIF entry in instance network info cache for port 71c1451f-de1d-4700-8326-282bc39bb6f7. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1347.725057] env[62627]: DEBUG nova.network.neutron [req-71325358-cd08-4c19-8d77-b85a40ea7715 req-8525834c-c9bd-48e1-bbee-fa305a184844 service nova] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Updating instance_info_cache with network_info: [{"id": "71c1451f-de1d-4700-8326-282bc39bb6f7", "address": "fa:16:3e:fb:be:f1", "network": {"id": "e5d8177b-3afb-46cb-88d9-d4777ffa4a39", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1184692155-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f3a36c557145e38c528a628b1f3b67", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a44713-0af1-486e-bc0d-00e03a769fa4", "external-id": "nsx-vlan-transportzone-420", "segmentation_id": 420, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71c1451f-de", "ovs_interfaceid": "71c1451f-de1d-4700-8326-282bc39bb6f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1347.798541] env[62627]: DEBUG oslo_concurrency.lockutils [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquiring lock "refresh_cache-3b1e8b81-7d18-498a-b35e-8ab1c87374c8" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1347.798689] env[62627]: DEBUG oslo_concurrency.lockutils [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquired lock "refresh_cache-3b1e8b81-7d18-498a-b35e-8ab1c87374c8" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1347.798838] env[62627]: DEBUG nova.network.neutron [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 3b1e8b81-7d18-498a-b35e-8ab1c87374c8] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1347.806210] env[62627]: DEBUG oslo_vmware.api [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52851654-1565-58f8-9dc2-1450a9549aa9, 'name': SearchDatastore_Task, 'duration_secs': 0.010139} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1347.806594] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1347.806812] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1347.807049] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1347.807196] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1347.807376] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1347.807623] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d7ec0005-167b-453f-8a98-368d4ca06220 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.815863] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1347.816046] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1347.816719] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-923c86d2-014b-44b1-84cb-2304ee94ef05 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.822078] env[62627]: DEBUG oslo_vmware.api [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1347.822078] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52a9b9cc-bf80-30bb-6457-06df0cef6bbf" [ 1347.822078] env[62627]: _type = "Task" [ 1347.822078] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1347.829430] env[62627]: DEBUG oslo_vmware.api [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52a9b9cc-bf80-30bb-6457-06df0cef6bbf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.005469] env[62627]: DEBUG nova.network.neutron [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Successfully created port: e8d180db-2d86-4c84-b9dc-877a37a56877 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1348.128268] env[62627]: DEBUG nova.compute.manager [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1348.228424] env[62627]: DEBUG oslo_concurrency.lockutils [req-71325358-cd08-4c19-8d77-b85a40ea7715 req-8525834c-c9bd-48e1-bbee-fa305a184844 service nova] Releasing lock "refresh_cache-8c170084-a7f8-4774-8f9d-7874103b0f4f" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1348.332072] env[62627]: DEBUG oslo_vmware.api [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52a9b9cc-bf80-30bb-6457-06df0cef6bbf, 'name': SearchDatastore_Task, 'duration_secs': 0.007611} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1348.332841] env[62627]: DEBUG nova.network.neutron [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 3b1e8b81-7d18-498a-b35e-8ab1c87374c8] Instance cache missing network info. 
{{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1348.335199] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-40f982a8-b2e3-4356-afb5-aefa365a5b11 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.340443] env[62627]: DEBUG oslo_vmware.api [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1348.340443] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52504aea-c27b-116f-9980-ff043bf9845e" [ 1348.340443] env[62627]: _type = "Task" [ 1348.340443] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1348.347881] env[62627]: DEBUG oslo_vmware.api [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52504aea-c27b-116f-9980-ff043bf9845e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.474874] env[62627]: DEBUG nova.network.neutron [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 3b1e8b81-7d18-498a-b35e-8ab1c87374c8] Updating instance_info_cache with network_info: [{"id": "f488b8ce-9270-489a-862c-8166c83c1bdc", "address": "fa:16:3e:22:65:5e", "network": {"id": "93a120a3-5be9-4822-960a-7dc2d246a740", "bridge": "br-int", "label": "tempest-ServersTestJSON-1051356861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "710e7463994b4ec08bcc6d479cd1138b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49a94c29-ddd5-4383-9219-1c2c3bb09cc5", "external-id": "nsx-vlan-transportzone-2", "segmentation_id": 2, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf488b8ce-92", "ovs_interfaceid": "f488b8ce-9270-489a-862c-8166c83c1bdc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1348.851655] env[62627]: DEBUG oslo_vmware.api [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52504aea-c27b-116f-9980-ff043bf9845e, 'name': SearchDatastore_Task, 'duration_secs': 0.009665} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1348.852028] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1348.852132] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 8c170084-a7f8-4774-8f9d-7874103b0f4f/8c170084-a7f8-4774-8f9d-7874103b0f4f.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1348.852388] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b81f782d-bd16-41b7-b8a8-95937c8bb470 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.859085] env[62627]: DEBUG oslo_vmware.api [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1348.859085] env[62627]: value = "task-2195428" [ 1348.859085] env[62627]: _type = "Task" [ 1348.859085] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1348.866899] env[62627]: DEBUG oslo_vmware.api [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195428, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.977315] env[62627]: DEBUG oslo_concurrency.lockutils [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Releasing lock "refresh_cache-3b1e8b81-7d18-498a-b35e-8ab1c87374c8" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1348.977643] env[62627]: DEBUG nova.compute.manager [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 3b1e8b81-7d18-498a-b35e-8ab1c87374c8] Instance network_info: |[{"id": "f488b8ce-9270-489a-862c-8166c83c1bdc", "address": "fa:16:3e:22:65:5e", "network": {"id": "93a120a3-5be9-4822-960a-7dc2d246a740", "bridge": "br-int", "label": "tempest-ServersTestJSON-1051356861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "710e7463994b4ec08bcc6d479cd1138b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49a94c29-ddd5-4383-9219-1c2c3bb09cc5", "external-id": "nsx-vlan-transportzone-2", "segmentation_id": 2, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf488b8ce-92", "ovs_interfaceid": "f488b8ce-9270-489a-862c-8166c83c1bdc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1348.978078] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 3b1e8b81-7d18-498a-b35e-8ab1c87374c8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:22:65:5e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '49a94c29-ddd5-4383-9219-1c2c3bb09cc5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f488b8ce-9270-489a-862c-8166c83c1bdc', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1348.986579] env[62627]: DEBUG oslo.service.loopingcall [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1348.986838] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3b1e8b81-7d18-498a-b35e-8ab1c87374c8] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1348.987145] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f4948f8d-e442-43db-8480-a59def3bea58 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.008924] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1349.008924] env[62627]: value = "task-2195429" [ 1349.008924] env[62627]: _type = "Task" [ 1349.008924] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1349.017075] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195429, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.051855] env[62627]: DEBUG nova.compute.manager [req-50cfc054-3010-4372-ac69-24054da25517 req-de1e3aab-03d7-4329-8f56-9b2b305688e5 service nova] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Received event network-changed-4cbd817b-c3ef-4a33-8648-4297830602e0 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1349.052178] env[62627]: DEBUG nova.compute.manager [req-50cfc054-3010-4372-ac69-24054da25517 req-de1e3aab-03d7-4329-8f56-9b2b305688e5 service nova] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Refreshing instance network info cache due to event network-changed-4cbd817b-c3ef-4a33-8648-4297830602e0. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1349.052501] env[62627]: DEBUG oslo_concurrency.lockutils [req-50cfc054-3010-4372-ac69-24054da25517 req-de1e3aab-03d7-4329-8f56-9b2b305688e5 service nova] Acquiring lock "refresh_cache-16ed0637-d24d-488d-b9ba-dd5a49a7b2a2" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1349.052696] env[62627]: DEBUG oslo_concurrency.lockutils [req-50cfc054-3010-4372-ac69-24054da25517 req-de1e3aab-03d7-4329-8f56-9b2b305688e5 service nova] Acquired lock "refresh_cache-16ed0637-d24d-488d-b9ba-dd5a49a7b2a2" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1349.052897] env[62627]: DEBUG nova.network.neutron [req-50cfc054-3010-4372-ac69-24054da25517 req-de1e3aab-03d7-4329-8f56-9b2b305688e5 service nova] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Refreshing network info cache for port 4cbd817b-c3ef-4a33-8648-4297830602e0 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1349.138174] env[62627]: DEBUG nova.compute.manager [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Start spawning the instance on the hypervisor. 
{{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1349.168527] env[62627]: DEBUG nova.virt.hardware [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1349.168783] env[62627]: DEBUG nova.virt.hardware [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1349.168943] env[62627]: DEBUG nova.virt.hardware [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1349.169196] env[62627]: DEBUG nova.virt.hardware [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1349.169365] env[62627]: DEBUG nova.virt.hardware [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1349.169524] env[62627]: DEBUG nova.virt.hardware [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1349.169737] env[62627]: DEBUG nova.virt.hardware [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1349.169897] env[62627]: DEBUG nova.virt.hardware [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1349.170086] 
env[62627]: DEBUG nova.virt.hardware [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1349.170304] env[62627]: DEBUG nova.virt.hardware [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1349.170513] env[62627]: DEBUG nova.virt.hardware [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1349.171416] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-364e1510-6cd0-4f5f-af74-e38b666d2ee7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.182754] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32bec6b2-c974-45af-b6f8-286c67087007 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.187006] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Didn't find any instances for network info cache update. {{(pid=62627) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10531}} [ 1349.187616] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1349.188236] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1349.188481] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1349.188782] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1349.189108] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1349.189354] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62627) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1349.189513] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62627) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 1349.189657] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1349.370798] env[62627]: DEBUG oslo_vmware.api [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195428, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.410373] env[62627]: DEBUG nova.compute.manager [req-df6a8137-bf7d-4c7f-8dec-2a3f6cdf2b6f req-73784c8c-dcb7-45e3-a3b4-a8a87c7145e3 service nova] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Received event network-vif-plugged-e8d180db-2d86-4c84-b9dc-877a37a56877 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1349.410619] env[62627]: DEBUG oslo_concurrency.lockutils [req-df6a8137-bf7d-4c7f-8dec-2a3f6cdf2b6f req-73784c8c-dcb7-45e3-a3b4-a8a87c7145e3 service nova] Acquiring lock "6c4ebb14-c3a3-485c-bbb3-405a8b7f92db-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1349.410847] env[62627]: DEBUG oslo_concurrency.lockutils [req-df6a8137-bf7d-4c7f-8dec-2a3f6cdf2b6f req-73784c8c-dcb7-45e3-a3b4-a8a87c7145e3 service nova] Lock "6c4ebb14-c3a3-485c-bbb3-405a8b7f92db-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1349.411075] env[62627]: DEBUG oslo_concurrency.lockutils [req-df6a8137-bf7d-4c7f-8dec-2a3f6cdf2b6f req-73784c8c-dcb7-45e3-a3b4-a8a87c7145e3 service nova] Lock "6c4ebb14-c3a3-485c-bbb3-405a8b7f92db-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1349.411297] env[62627]: DEBUG nova.compute.manager [req-df6a8137-bf7d-4c7f-8dec-2a3f6cdf2b6f req-73784c8c-dcb7-45e3-a3b4-a8a87c7145e3 service nova] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] No waiting events found dispatching network-vif-plugged-e8d180db-2d86-4c84-b9dc-877a37a56877 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1349.411502] env[62627]: WARNING nova.compute.manager [req-df6a8137-bf7d-4c7f-8dec-2a3f6cdf2b6f req-73784c8c-dcb7-45e3-a3b4-a8a87c7145e3 service nova] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Received unexpected event network-vif-plugged-e8d180db-2d86-4c84-b9dc-877a37a56877 for instance with vm_state building and task_state spawning. 
[ 1349.513936] env[62627]: DEBUG nova.network.neutron [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Successfully updated port: e8d180db-2d86-4c84-b9dc-877a37a56877 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1349.520313] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195429, 'name': CreateVM_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.692756] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1349.692985] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1349.693170] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1349.693405] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62627) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1349.694198] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea7d0912-afa2-4671-abda-dc1906d33268 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.702780] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c872992-3f2b-4bad-9676-c530c1fff929 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.718958] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1b008d8-0c39-4402-ac02-14eb4a86179d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.730230] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a57fd12-c0ab-4da8-942c-e92757ea47e9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.760016] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180161MB free_disk=177GB free_vcpus=48 pci_devices=None {{(pid=62627) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1349.760193] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None 
None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1349.760432] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1349.796774] env[62627]: DEBUG nova.network.neutron [req-50cfc054-3010-4372-ac69-24054da25517 req-de1e3aab-03d7-4329-8f56-9b2b305688e5 service nova] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Updated VIF entry in instance network info cache for port 4cbd817b-c3ef-4a33-8648-4297830602e0. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1349.797249] env[62627]: DEBUG nova.network.neutron [req-50cfc054-3010-4372-ac69-24054da25517 req-de1e3aab-03d7-4329-8f56-9b2b305688e5 service nova] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Updating instance_info_cache with network_info: [{"id": "4cbd817b-c3ef-4a33-8648-4297830602e0", "address": "fa:16:3e:e3:f3:a5", "network": {"id": "da26ed97-a6a7-47f1-b6c0-d1b2f45ad202", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-265963183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.167", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97c11d791dcf4f94a8e8bda7a6f014cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6eaa481-1f92-4851-b98e-09ed0daad7cb", "external-id": "nsx-vlan-transportzone-636", "segmentation_id": 636, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4cbd817b-c3", "ovs_interfaceid": "4cbd817b-c3ef-4a33-8648-4297830602e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1349.870256] env[62627]: DEBUG oslo_vmware.api [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195428, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.651079} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1349.870642] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 8c170084-a7f8-4774-8f9d-7874103b0f4f/8c170084-a7f8-4774-8f9d-7874103b0f4f.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1349.870742] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1349.870983] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6b93c41d-10e0-4c2e-bd5b-51dfadd72948 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.877212] env[62627]: DEBUG oslo_vmware.api [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1349.877212] env[62627]: value = "task-2195430" [ 1349.877212] env[62627]: _type = "Task" [ 1349.877212] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1349.884153] env[62627]: DEBUG oslo_vmware.api [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195430, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.016431] env[62627]: DEBUG oslo_concurrency.lockutils [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Acquiring lock "refresh_cache-6c4ebb14-c3a3-485c-bbb3-405a8b7f92db" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1350.016627] env[62627]: DEBUG oslo_concurrency.lockutils [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Acquired lock "refresh_cache-6c4ebb14-c3a3-485c-bbb3-405a8b7f92db" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1350.016733] env[62627]: DEBUG nova.network.neutron [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1350.020948] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195429, 'name': CreateVM_Task, 'duration_secs': 0.522818} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1350.021346] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3b1e8b81-7d18-498a-b35e-8ab1c87374c8] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1350.022301] env[62627]: DEBUG oslo_concurrency.lockutils [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1350.022301] env[62627]: DEBUG oslo_concurrency.lockutils [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1350.022570] env[62627]: DEBUG oslo_concurrency.lockutils [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1350.022732] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1738cbfe-7d06-4cac-90ce-208193d3a293 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.028199] env[62627]: DEBUG oslo_vmware.api [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1350.028199] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52098680-613a-a82d-55df-e23a990fc441" [ 1350.028199] env[62627]: _type = "Task" [ 1350.028199] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1350.036174] env[62627]: DEBUG oslo_vmware.api [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52098680-613a-a82d-55df-e23a990fc441, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.300098] env[62627]: DEBUG oslo_concurrency.lockutils [req-50cfc054-3010-4372-ac69-24054da25517 req-de1e3aab-03d7-4329-8f56-9b2b305688e5 service nova] Releasing lock "refresh_cache-16ed0637-d24d-488d-b9ba-dd5a49a7b2a2" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1350.300452] env[62627]: DEBUG nova.compute.manager [req-50cfc054-3010-4372-ac69-24054da25517 req-de1e3aab-03d7-4329-8f56-9b2b305688e5 service nova] [instance: 3b1e8b81-7d18-498a-b35e-8ab1c87374c8] Received event network-vif-plugged-f488b8ce-9270-489a-862c-8166c83c1bdc {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1350.300650] env[62627]: DEBUG oslo_concurrency.lockutils [req-50cfc054-3010-4372-ac69-24054da25517 req-de1e3aab-03d7-4329-8f56-9b2b305688e5 service nova] Acquiring lock "3b1e8b81-7d18-498a-b35e-8ab1c87374c8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1350.300852] env[62627]: DEBUG oslo_concurrency.lockutils [req-50cfc054-3010-4372-ac69-24054da25517 req-de1e3aab-03d7-4329-8f56-9b2b305688e5 service nova] Lock "3b1e8b81-7d18-498a-b35e-8ab1c87374c8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1350.301026] env[62627]: DEBUG oslo_concurrency.lockutils [req-50cfc054-3010-4372-ac69-24054da25517 req-de1e3aab-03d7-4329-8f56-9b2b305688e5 service nova] Lock "3b1e8b81-7d18-498a-b35e-8ab1c87374c8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1350.301199] env[62627]: DEBUG nova.compute.manager [req-50cfc054-3010-4372-ac69-24054da25517 req-de1e3aab-03d7-4329-8f56-9b2b305688e5 service nova] [instance: 3b1e8b81-7d18-498a-b35e-8ab1c87374c8] No waiting events found dispatching network-vif-plugged-f488b8ce-9270-489a-862c-8166c83c1bdc {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1350.301409] env[62627]: WARNING nova.compute.manager [req-50cfc054-3010-4372-ac69-24054da25517 req-de1e3aab-03d7-4329-8f56-9b2b305688e5 service nova] [instance: 3b1e8b81-7d18-498a-b35e-8ab1c87374c8] Received unexpected event network-vif-plugged-f488b8ce-9270-489a-862c-8166c83c1bdc for instance with vm_state building and task_state spawning. [ 1350.301586] env[62627]: DEBUG nova.compute.manager [req-50cfc054-3010-4372-ac69-24054da25517 req-de1e3aab-03d7-4329-8f56-9b2b305688e5 service nova] [instance: 3b1e8b81-7d18-498a-b35e-8ab1c87374c8] Received event network-changed-f488b8ce-9270-489a-862c-8166c83c1bdc {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1350.301740] env[62627]: DEBUG nova.compute.manager [req-50cfc054-3010-4372-ac69-24054da25517 req-de1e3aab-03d7-4329-8f56-9b2b305688e5 service nova] [instance: 3b1e8b81-7d18-498a-b35e-8ab1c87374c8] Refreshing instance network info cache due to event network-changed-f488b8ce-9270-489a-862c-8166c83c1bdc. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1350.301919] env[62627]: DEBUG oslo_concurrency.lockutils [req-50cfc054-3010-4372-ac69-24054da25517 req-de1e3aab-03d7-4329-8f56-9b2b305688e5 service nova] Acquiring lock "refresh_cache-3b1e8b81-7d18-498a-b35e-8ab1c87374c8" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1350.302076] env[62627]: DEBUG oslo_concurrency.lockutils [req-50cfc054-3010-4372-ac69-24054da25517 req-de1e3aab-03d7-4329-8f56-9b2b305688e5 service nova] Acquired lock "refresh_cache-3b1e8b81-7d18-498a-b35e-8ab1c87374c8" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1350.302330] env[62627]: DEBUG nova.network.neutron [req-50cfc054-3010-4372-ac69-24054da25517 req-de1e3aab-03d7-4329-8f56-9b2b305688e5 service nova] [instance: 3b1e8b81-7d18-498a-b35e-8ab1c87374c8] Refreshing network info cache for port f488b8ce-9270-489a-862c-8166c83c1bdc {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1350.387084] env[62627]: DEBUG oslo_vmware.api [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195430, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074282} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1350.387311] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1350.388069] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3c7ed4e-99fc-4e9c-8865-7d7bd06a9ae0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.409042] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Reconfiguring VM instance instance-00000071 to attach disk [datastore2] 8c170084-a7f8-4774-8f9d-7874103b0f4f/8c170084-a7f8-4774-8f9d-7874103b0f4f.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1350.409304] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6b3453d3-7c1e-4065-807a-acb11d730766 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.427682] env[62627]: DEBUG oslo_vmware.api [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1350.427682] env[62627]: value = "task-2195431" [ 1350.427682] env[62627]: _type = "Task" [ 1350.427682] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1350.434974] env[62627]: DEBUG oslo_vmware.api [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195431, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.539651] env[62627]: DEBUG oslo_vmware.api [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52098680-613a-a82d-55df-e23a990fc441, 'name': SearchDatastore_Task, 'duration_secs': 0.009323} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1350.539651] env[62627]: DEBUG oslo_concurrency.lockutils [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1350.539891] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 3b1e8b81-7d18-498a-b35e-8ab1c87374c8] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1350.540084] env[62627]: DEBUG oslo_concurrency.lockutils [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1350.540329] env[62627]: DEBUG oslo_concurrency.lockutils [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1350.540468] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1350.540720] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fafb018c-549d-45aa-b47a-af494770551d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.548715] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1350.548889] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None 
req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1350.549620] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-833e6359-7993-43a4-b1db-5b4359537970 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.554384] env[62627]: DEBUG oslo_vmware.api [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1350.554384] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5263b73a-e558-c96a-e327-70c2aca88231" [ 1350.554384] env[62627]: _type = "Task" [ 1350.554384] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1350.555038] env[62627]: DEBUG nova.network.neutron [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1350.564028] env[62627]: DEBUG oslo_vmware.api [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5263b73a-e558-c96a-e327-70c2aca88231, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.700445] env[62627]: DEBUG nova.network.neutron [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Updating instance_info_cache with network_info: [{"id": "e8d180db-2d86-4c84-b9dc-877a37a56877", "address": "fa:16:3e:6d:92:92", "network": {"id": "76eae841-5dda-4384-92ba-55247808e11f", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1004379592-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f41b0c171180469e9092b8a8da697a78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8d180db-2d", "ovs_interfaceid": "e8d180db-2d86-4c84-b9dc-877a37a56877", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1350.791722] env[62627]: DEBUG nova.compute.resource_tracker [None 
req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1350.791886] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 7e340c99-8d15-4696-98d5-a952f5d2aeee actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1350.792024] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 00bb8c04-3c0b-48ff-8127-13d3e15cb86c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1350.792150] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 02b9bb2d-5752-4c09-9661-5966c2ac1bad actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1350.792268] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1350.792382] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 8c170084-a7f8-4774-8f9d-7874103b0f4f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1350.792494] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 3b1e8b81-7d18-498a-b35e-8ab1c87374c8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1350.792607] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1350.792790] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=62627) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1350.792928] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2112MB phys_disk=200GB used_disk=7GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=62627) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1350.890709] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c2a4b3b-8c0d-4a13-bcfe-a4344f950195 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.898042] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-423460b2-56a5-4280-a752-578ab3b6811c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.929847] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c891f63d-044f-4eaa-8b7c-12b4b5147c5f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.942651] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d11de261-7dd1-470d-8417-07d1c9afadc8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.946197] env[62627]: DEBUG oslo_vmware.api [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195431, 'name': ReconfigVM_Task, 'duration_secs': 0.332236} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1350.946467] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Reconfigured VM instance instance-00000071 to attach disk [datastore2] 8c170084-a7f8-4774-8f9d-7874103b0f4f/8c170084-a7f8-4774-8f9d-7874103b0f4f.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1350.947418] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a2468ec0-ecd7-4b03-b146-59d2a3a2190d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.956684] env[62627]: DEBUG nova.compute.provider_tree [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1350.961050] env[62627]: DEBUG oslo_vmware.api [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1350.961050] env[62627]: value = "task-2195432" [ 1350.961050] env[62627]: _type = "Task" [ 1350.961050] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1350.969705] env[62627]: DEBUG oslo_vmware.api [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195432, 'name': Rename_Task} progress is 10%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.069397] env[62627]: DEBUG oslo_vmware.api [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5263b73a-e558-c96a-e327-70c2aca88231, 'name': SearchDatastore_Task, 'duration_secs': 0.008957} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1351.070315] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-288c36e8-b8d5-477a-87ca-331b35972f37 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.075298] env[62627]: DEBUG oslo_vmware.api [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1351.075298] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52ad24a2-9ca2-a38b-6193-e3b4bd7f0301" [ 1351.075298] env[62627]: _type = "Task" [ 1351.075298] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1351.078743] env[62627]: DEBUG nova.network.neutron [req-50cfc054-3010-4372-ac69-24054da25517 req-de1e3aab-03d7-4329-8f56-9b2b305688e5 service nova] [instance: 3b1e8b81-7d18-498a-b35e-8ab1c87374c8] Updated VIF entry in instance network info cache for port f488b8ce-9270-489a-862c-8166c83c1bdc. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1351.079068] env[62627]: DEBUG nova.network.neutron [req-50cfc054-3010-4372-ac69-24054da25517 req-de1e3aab-03d7-4329-8f56-9b2b305688e5 service nova] [instance: 3b1e8b81-7d18-498a-b35e-8ab1c87374c8] Updating instance_info_cache with network_info: [{"id": "f488b8ce-9270-489a-862c-8166c83c1bdc", "address": "fa:16:3e:22:65:5e", "network": {"id": "93a120a3-5be9-4822-960a-7dc2d246a740", "bridge": "br-int", "label": "tempest-ServersTestJSON-1051356861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "710e7463994b4ec08bcc6d479cd1138b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49a94c29-ddd5-4383-9219-1c2c3bb09cc5", "external-id": "nsx-vlan-transportzone-2", "segmentation_id": 2, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf488b8ce-92", "ovs_interfaceid": "f488b8ce-9270-489a-862c-8166c83c1bdc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1351.085374] env[62627]: DEBUG oslo_vmware.api [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52ad24a2-9ca2-a38b-6193-e3b4bd7f0301, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.203082] env[62627]: DEBUG oslo_concurrency.lockutils [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Releasing lock "refresh_cache-6c4ebb14-c3a3-485c-bbb3-405a8b7f92db" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1351.203465] env[62627]: DEBUG nova.compute.manager [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Instance network_info: |[{"id": "e8d180db-2d86-4c84-b9dc-877a37a56877", "address": "fa:16:3e:6d:92:92", "network": {"id": "76eae841-5dda-4384-92ba-55247808e11f", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1004379592-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f41b0c171180469e9092b8a8da697a78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8d180db-2d", "ovs_interfaceid": "e8d180db-2d86-4c84-b9dc-877a37a56877", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1351.203951] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6d:92:92', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dc7aa55d-223a-4157-9137-88dc492f2db2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e8d180db-2d86-4c84-b9dc-877a37a56877', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1351.211225] env[62627]: DEBUG oslo.service.loopingcall [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1351.211463] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1351.211730] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-224e3cb8-974c-4df3-ab6a-e74b36c882be {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.231598] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1351.231598] env[62627]: value = "task-2195433" [ 1351.231598] env[62627]: _type = "Task" [ 1351.231598] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1351.238951] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195433, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.437269] env[62627]: DEBUG nova.compute.manager [req-dac11351-1c14-4dfc-b3dc-71c0e881331e req-db255cf8-2d10-4492-8ca1-32e1c58158d8 service nova] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Received event network-changed-e8d180db-2d86-4c84-b9dc-877a37a56877 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1351.437492] env[62627]: DEBUG nova.compute.manager [req-dac11351-1c14-4dfc-b3dc-71c0e881331e req-db255cf8-2d10-4492-8ca1-32e1c58158d8 service nova] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Refreshing instance network info cache due to event network-changed-e8d180db-2d86-4c84-b9dc-877a37a56877. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1351.437696] env[62627]: DEBUG oslo_concurrency.lockutils [req-dac11351-1c14-4dfc-b3dc-71c0e881331e req-db255cf8-2d10-4492-8ca1-32e1c58158d8 service nova] Acquiring lock "refresh_cache-6c4ebb14-c3a3-485c-bbb3-405a8b7f92db" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1351.437854] env[62627]: DEBUG oslo_concurrency.lockutils [req-dac11351-1c14-4dfc-b3dc-71c0e881331e req-db255cf8-2d10-4492-8ca1-32e1c58158d8 service nova] Acquired lock "refresh_cache-6c4ebb14-c3a3-485c-bbb3-405a8b7f92db" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1351.438052] env[62627]: DEBUG nova.network.neutron [req-dac11351-1c14-4dfc-b3dc-71c0e881331e req-db255cf8-2d10-4492-8ca1-32e1c58158d8 service nova] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Refreshing network info cache for port e8d180db-2d86-4c84-b9dc-877a37a56877 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1351.462421] env[62627]: DEBUG nova.scheduler.client.report [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1351.474845] env[62627]: DEBUG oslo_vmware.api [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195432, 'name': Rename_Task, 'duration_secs': 0.137803} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1351.475129] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1351.475364] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-49e07f7f-ae15-4100-a662-494dfb834967 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.482361] env[62627]: DEBUG oslo_vmware.api [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1351.482361] env[62627]: value = "task-2195434" [ 1351.482361] env[62627]: _type = "Task" [ 1351.482361] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1351.490596] env[62627]: DEBUG oslo_vmware.api [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195434, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.582106] env[62627]: DEBUG oslo_concurrency.lockutils [req-50cfc054-3010-4372-ac69-24054da25517 req-de1e3aab-03d7-4329-8f56-9b2b305688e5 service nova] Releasing lock "refresh_cache-3b1e8b81-7d18-498a-b35e-8ab1c87374c8" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1351.585887] env[62627]: DEBUG oslo_vmware.api [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52ad24a2-9ca2-a38b-6193-e3b4bd7f0301, 'name': SearchDatastore_Task, 'duration_secs': 0.009727} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1351.586147] env[62627]: DEBUG oslo_concurrency.lockutils [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1351.586398] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 3b1e8b81-7d18-498a-b35e-8ab1c87374c8/3b1e8b81-7d18-498a-b35e-8ab1c87374c8.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1351.586649] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cf68add3-ee34-43f3-92c7-2945f0fae59f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.593775] env[62627]: DEBUG oslo_vmware.api [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1351.593775] env[62627]: value = "task-2195435" [ 1351.593775] env[62627]: _type = "Task" [ 1351.593775] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1351.601942] env[62627]: DEBUG oslo_vmware.api [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195435, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.743746] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195433, 'name': CreateVM_Task, 'duration_secs': 0.312889} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1351.743949] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1351.744646] env[62627]: DEBUG oslo_concurrency.lockutils [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1351.744812] env[62627]: DEBUG oslo_concurrency.lockutils [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1351.745166] env[62627]: DEBUG oslo_concurrency.lockutils [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1351.745433] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c31bc281-581c-4f41-938c-e1a82df5a7c4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.750474] env[62627]: DEBUG oslo_vmware.api [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Waiting for the task: (returnval){ [ 1351.750474] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]524bc5b9-a26c-e755-1fee-067fe2586618" [ 1351.750474] env[62627]: _type = "Task" [ 1351.750474] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1351.758501] env[62627]: DEBUG oslo_vmware.api [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]524bc5b9-a26c-e755-1fee-067fe2586618, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.970738] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62627) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1351.971210] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.211s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1351.994742] env[62627]: DEBUG oslo_vmware.api [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195434, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.106658] env[62627]: DEBUG oslo_vmware.api [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195435, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.457138} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1352.106902] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 3b1e8b81-7d18-498a-b35e-8ab1c87374c8/3b1e8b81-7d18-498a-b35e-8ab1c87374c8.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1352.107138] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 3b1e8b81-7d18-498a-b35e-8ab1c87374c8] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1352.107466] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-31d90afb-1283-434a-b193-0c4763c6ca73 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.114443] env[62627]: DEBUG oslo_vmware.api [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1352.114443] env[62627]: value = "task-2195436" [ 1352.114443] env[62627]: _type = "Task" [ 1352.114443] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1352.122274] env[62627]: DEBUG oslo_vmware.api [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195436, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.220970] env[62627]: DEBUG nova.network.neutron [req-dac11351-1c14-4dfc-b3dc-71c0e881331e req-db255cf8-2d10-4492-8ca1-32e1c58158d8 service nova] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Updated VIF entry in instance network info cache for port e8d180db-2d86-4c84-b9dc-877a37a56877. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1352.221589] env[62627]: DEBUG nova.network.neutron [req-dac11351-1c14-4dfc-b3dc-71c0e881331e req-db255cf8-2d10-4492-8ca1-32e1c58158d8 service nova] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Updating instance_info_cache with network_info: [{"id": "e8d180db-2d86-4c84-b9dc-877a37a56877", "address": "fa:16:3e:6d:92:92", "network": {"id": "76eae841-5dda-4384-92ba-55247808e11f", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1004379592-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f41b0c171180469e9092b8a8da697a78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8d180db-2d", "ovs_interfaceid": "e8d180db-2d86-4c84-b9dc-877a37a56877", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1352.261419] env[62627]: DEBUG oslo_vmware.api [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]524bc5b9-a26c-e755-1fee-067fe2586618, 'name': SearchDatastore_Task, 'duration_secs': 0.057466} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1352.261759] env[62627]: DEBUG oslo_concurrency.lockutils [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1352.261995] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1352.262253] env[62627]: DEBUG oslo_concurrency.lockutils [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1352.262429] env[62627]: DEBUG oslo_concurrency.lockutils [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1352.262638] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1352.262858] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fc33dccc-89bf-4de9-943d-2df95749bb02 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.271804] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1352.271971] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1352.272732] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0bb5b284-d989-4bba-a205-8b4548167617 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.277994] env[62627]: DEBUG oslo_vmware.api [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Waiting for the task: (returnval){ [ 1352.277994] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5244547b-6672-f6d4-edf5-12c754ec0220" [ 1352.277994] env[62627]: _type = "Task" [ 1352.277994] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1352.285976] env[62627]: DEBUG oslo_vmware.api [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5244547b-6672-f6d4-edf5-12c754ec0220, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.493674] env[62627]: DEBUG oslo_vmware.api [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195434, 'name': PowerOnVM_Task, 'duration_secs': 0.682831} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1352.493931] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1352.494223] env[62627]: INFO nova.compute.manager [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Took 7.74 seconds to spawn the instance on the hypervisor. [ 1352.494427] env[62627]: DEBUG nova.compute.manager [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1352.495540] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4020fe88-c1df-4fda-b9dc-1532ed1358c5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.624295] env[62627]: DEBUG oslo_vmware.api [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195436, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.091739} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1352.624574] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 3b1e8b81-7d18-498a-b35e-8ab1c87374c8] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1352.625372] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9404048b-2cfb-4fdf-83bf-db144a9a9c73 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.646920] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 3b1e8b81-7d18-498a-b35e-8ab1c87374c8] Reconfiguring VM instance instance-00000072 to attach disk [datastore2] 3b1e8b81-7d18-498a-b35e-8ab1c87374c8/3b1e8b81-7d18-498a-b35e-8ab1c87374c8.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1352.647254] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1ab0fda9-5235-482a-be10-258f37fbb8b6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.666670] env[62627]: DEBUG oslo_vmware.api [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1352.666670] env[62627]: value = "task-2195437" [ 1352.666670] env[62627]: _type = "Task" [ 1352.666670] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1352.674432] env[62627]: DEBUG oslo_vmware.api [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195437, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.724592] env[62627]: DEBUG oslo_concurrency.lockutils [req-dac11351-1c14-4dfc-b3dc-71c0e881331e req-db255cf8-2d10-4492-8ca1-32e1c58158d8 service nova] Releasing lock "refresh_cache-6c4ebb14-c3a3-485c-bbb3-405a8b7f92db" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1352.788827] env[62627]: DEBUG oslo_vmware.api [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5244547b-6672-f6d4-edf5-12c754ec0220, 'name': SearchDatastore_Task, 'duration_secs': 0.026567} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1352.789633] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-86a7088e-f35f-4bb7-9116-584e6cb49e38 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.795006] env[62627]: DEBUG oslo_vmware.api [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Waiting for the task: (returnval){ [ 1352.795006] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52c6bbbd-68d4-01cf-77b6-e85a224f1cc5" [ 1352.795006] env[62627]: _type = "Task" [ 1352.795006] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1352.802416] env[62627]: DEBUG oslo_vmware.api [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52c6bbbd-68d4-01cf-77b6-e85a224f1cc5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.016286] env[62627]: INFO nova.compute.manager [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Took 13.69 seconds to build instance. [ 1353.180411] env[62627]: DEBUG oslo_vmware.api [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195437, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.309061] env[62627]: DEBUG oslo_vmware.api [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52c6bbbd-68d4-01cf-77b6-e85a224f1cc5, 'name': SearchDatastore_Task, 'duration_secs': 0.031802} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1353.309061] env[62627]: DEBUG oslo_concurrency.lockutils [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1353.309061] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db/6c4ebb14-c3a3-485c-bbb3-405a8b7f92db.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1353.309061] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e68dfe01-a7fe-408f-a478-24cfb9d3cc2c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.316861] env[62627]: DEBUG oslo_vmware.api [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Waiting for the task: (returnval){ [ 1353.316861] env[62627]: value = "task-2195438" [ 1353.316861] env[62627]: _type = "Task" [ 1353.316861] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1353.325685] env[62627]: DEBUG oslo_vmware.api [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195438, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.518722] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d0604498-ce93-4f35-9d60-6edca64d12bc tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lock "8c170084-a7f8-4774-8f9d-7874103b0f4f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.196s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1353.676992] env[62627]: DEBUG oslo_vmware.api [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195437, 'name': ReconfigVM_Task, 'duration_secs': 0.838983} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1353.677297] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 3b1e8b81-7d18-498a-b35e-8ab1c87374c8] Reconfigured VM instance instance-00000072 to attach disk [datastore2] 3b1e8b81-7d18-498a-b35e-8ab1c87374c8/3b1e8b81-7d18-498a-b35e-8ab1c87374c8.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1353.677970] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-39a33789-8638-4b9c-97df-274299210d64 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.684877] env[62627]: DEBUG oslo_vmware.api [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1353.684877] env[62627]: value = "task-2195439" [ 1353.684877] env[62627]: _type = "Task" [ 1353.684877] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1353.694086] env[62627]: DEBUG oslo_vmware.api [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195439, 'name': Rename_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.763123] env[62627]: DEBUG nova.compute.manager [req-5e36195b-a202-4604-a05f-8b5b13b48177 req-84fa9336-a262-47fe-b228-d05af907cd99 service nova] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Received event network-changed-71c1451f-de1d-4700-8326-282bc39bb6f7 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1353.763577] env[62627]: DEBUG nova.compute.manager [req-5e36195b-a202-4604-a05f-8b5b13b48177 req-84fa9336-a262-47fe-b228-d05af907cd99 service nova] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Refreshing instance network info cache due to event network-changed-71c1451f-de1d-4700-8326-282bc39bb6f7. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1353.763742] env[62627]: DEBUG oslo_concurrency.lockutils [req-5e36195b-a202-4604-a05f-8b5b13b48177 req-84fa9336-a262-47fe-b228-d05af907cd99 service nova] Acquiring lock "refresh_cache-8c170084-a7f8-4774-8f9d-7874103b0f4f" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1353.763795] env[62627]: DEBUG oslo_concurrency.lockutils [req-5e36195b-a202-4604-a05f-8b5b13b48177 req-84fa9336-a262-47fe-b228-d05af907cd99 service nova] Acquired lock "refresh_cache-8c170084-a7f8-4774-8f9d-7874103b0f4f" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1353.763950] env[62627]: DEBUG nova.network.neutron [req-5e36195b-a202-4604-a05f-8b5b13b48177 req-84fa9336-a262-47fe-b228-d05af907cd99 service nova] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Refreshing network info cache for port 71c1451f-de1d-4700-8326-282bc39bb6f7 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1353.826824] env[62627]: DEBUG oslo_vmware.api [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195438, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.195056] env[62627]: DEBUG oslo_vmware.api [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195439, 'name': Rename_Task, 'duration_secs': 0.466802} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1354.195536] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 3b1e8b81-7d18-498a-b35e-8ab1c87374c8] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1354.195649] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d1b244ad-ac33-4625-ab46-59d0fb00ff79 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.202203] env[62627]: DEBUG oslo_vmware.api [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1354.202203] env[62627]: value = "task-2195440" [ 1354.202203] env[62627]: _type = "Task" [ 1354.202203] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1354.209681] env[62627]: DEBUG oslo_vmware.api [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195440, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.327091] env[62627]: DEBUG oslo_vmware.api [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195438, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.617122} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1354.327380] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db/6c4ebb14-c3a3-485c-bbb3-405a8b7f92db.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1354.327619] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1354.327886] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1c751c8d-82b9-43b9-b46d-09cec833f7e6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.334919] env[62627]: DEBUG oslo_vmware.api [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Waiting for the task: (returnval){ [ 1354.334919] env[62627]: value = "task-2195441" [ 1354.334919] env[62627]: _type = "Task" [ 1354.334919] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1354.343606] env[62627]: DEBUG oslo_vmware.api [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195441, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.559739] env[62627]: DEBUG nova.network.neutron [req-5e36195b-a202-4604-a05f-8b5b13b48177 req-84fa9336-a262-47fe-b228-d05af907cd99 service nova] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Updated VIF entry in instance network info cache for port 71c1451f-de1d-4700-8326-282bc39bb6f7. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1354.560187] env[62627]: DEBUG nova.network.neutron [req-5e36195b-a202-4604-a05f-8b5b13b48177 req-84fa9336-a262-47fe-b228-d05af907cd99 service nova] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Updating instance_info_cache with network_info: [{"id": "71c1451f-de1d-4700-8326-282bc39bb6f7", "address": "fa:16:3e:fb:be:f1", "network": {"id": "e5d8177b-3afb-46cb-88d9-d4777ffa4a39", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1184692155-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.160", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f3a36c557145e38c528a628b1f3b67", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a44713-0af1-486e-bc0d-00e03a769fa4", "external-id": "nsx-vlan-transportzone-420", "segmentation_id": 420, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71c1451f-de", "ovs_interfaceid": "71c1451f-de1d-4700-8326-282bc39bb6f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1354.712413] env[62627]: DEBUG oslo_vmware.api [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195440, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.845148] env[62627]: DEBUG oslo_vmware.api [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195441, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.109475} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1354.845461] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1354.846263] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f8cec15-1a06-423f-94ff-19d4b6dc5105 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.868563] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Reconfiguring VM instance instance-00000073 to attach disk [datastore2] 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db/6c4ebb14-c3a3-485c-bbb3-405a8b7f92db.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1354.868866] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c9982cef-ad36-4e64-bff7-bcd579129a06 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.890606] env[62627]: DEBUG oslo_vmware.api [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Waiting for the task: (returnval){ [ 1354.890606] env[62627]: value = "task-2195442" [ 1354.890606] env[62627]: _type = "Task" [ 1354.890606] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1354.898363] env[62627]: DEBUG oslo_vmware.api [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195442, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.063538] env[62627]: DEBUG oslo_concurrency.lockutils [req-5e36195b-a202-4604-a05f-8b5b13b48177 req-84fa9336-a262-47fe-b228-d05af907cd99 service nova] Releasing lock "refresh_cache-8c170084-a7f8-4774-8f9d-7874103b0f4f" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1355.212539] env[62627]: DEBUG oslo_vmware.api [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195440, 'name': PowerOnVM_Task, 'duration_secs': 1.001935} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1355.212894] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 3b1e8b81-7d18-498a-b35e-8ab1c87374c8] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1355.213012] env[62627]: INFO nova.compute.manager [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 3b1e8b81-7d18-498a-b35e-8ab1c87374c8] Took 8.26 seconds to spawn the instance on the hypervisor. [ 1355.213200] env[62627]: DEBUG nova.compute.manager [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 3b1e8b81-7d18-498a-b35e-8ab1c87374c8] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1355.213946] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3a62c08-b0ea-42b0-9f19-b3e3b0ade940 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.401246] env[62627]: DEBUG oslo_vmware.api [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195442, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.731020] env[62627]: INFO nova.compute.manager [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 3b1e8b81-7d18-498a-b35e-8ab1c87374c8] Took 13.91 seconds to build instance. [ 1355.900830] env[62627]: DEBUG oslo_vmware.api [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195442, 'name': ReconfigVM_Task, 'duration_secs': 0.563748} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1355.901166] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Reconfigured VM instance instance-00000073 to attach disk [datastore2] 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db/6c4ebb14-c3a3-485c-bbb3-405a8b7f92db.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1355.901850] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0919294b-aa94-439b-a446-aaa2d088aedd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.908115] env[62627]: DEBUG oslo_vmware.api [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Waiting for the task: (returnval){ [ 1355.908115] env[62627]: value = "task-2195443" [ 1355.908115] env[62627]: _type = "Task" [ 1355.908115] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1355.916079] env[62627]: DEBUG oslo_vmware.api [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195443, 'name': Rename_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.233108] env[62627]: DEBUG oslo_concurrency.lockutils [None req-182a16c2-b3bf-4e0f-9147-691ce43ae0e8 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "3b1e8b81-7d18-498a-b35e-8ab1c87374c8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.422s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1356.418842] env[62627]: DEBUG oslo_vmware.api [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195443, 'name': Rename_Task, 'duration_secs': 0.140301} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1356.419131] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1356.419390] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8d29c6d0-e2cb-40e0-914a-2643275a033a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.426044] env[62627]: DEBUG oslo_vmware.api [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Waiting for the task: (returnval){ [ 1356.426044] env[62627]: value = "task-2195444" [ 1356.426044] env[62627]: _type = "Task" [ 1356.426044] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1356.435499] env[62627]: DEBUG oslo_vmware.api [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195444, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.565259] env[62627]: DEBUG oslo_concurrency.lockutils [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquiring lock "cabc73d8-0304-4a7a-8101-068986ac9c3e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1356.565487] env[62627]: DEBUG oslo_concurrency.lockutils [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "cabc73d8-0304-4a7a-8101-068986ac9c3e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1356.936313] env[62627]: DEBUG oslo_vmware.api [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195444, 'name': PowerOnVM_Task, 'duration_secs': 0.499301} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1356.936619] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1356.936851] env[62627]: INFO nova.compute.manager [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Took 7.80 seconds to spawn the instance on the hypervisor. [ 1356.937047] env[62627]: DEBUG nova.compute.manager [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1356.937820] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cd04b3f-0b0a-4ec8-a326-42fba8616465 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.068351] env[62627]: DEBUG nova.compute.manager [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: cabc73d8-0304-4a7a-8101-068986ac9c3e] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1357.454226] env[62627]: INFO nova.compute.manager [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Took 13.30 seconds to build instance. 
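The DEBUG traffic in this section is dominated by the oslo.vmware task-polling loop: each CreateVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task and PowerOnVM_Task invocation is followed by wait_for_task/_poll_task entries until the task reports completion. Below is a minimal sketch of that pattern using the public oslo.vmware API; the vCenter host, credentials and managed-object references are placeholders for illustration only, not values taken from this log.

    # Minimal sketch (placeholders throughout) of the session + task-polling
    # pattern behind the wait_for_task/_poll_task entries above.
    from oslo_vmware import api

    def create_vm_and_wait(vm_folder_ref, config_spec, res_pool_ref):
        # Open a vCenter API session; host and credentials are placeholders.
        session = api.VMwareAPISession(
            'vcenter.example.org',
            'administrator@vsphere.local',
            'secret',
            api_retry_count=10,
            task_poll_interval=0.5)

        # invoke_api() issues the SOAP call (here Folder.CreateVM_Task) and
        # returns a task reference; wait_for_task() then polls that task,
        # which is what produces the "Task: {...} progress is N%" lines,
        # and returns the task info on success or raises on error.
        task = session.invoke_api(session.vim, 'CreateVM_Task', vm_folder_ref,
                                  config=config_spec, pool=res_pool_ref)
        return session.wait_for_task(task)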
[ 1357.591259] env[62627]: DEBUG oslo_concurrency.lockutils [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1357.591405] env[62627]: DEBUG oslo_concurrency.lockutils [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1357.592811] env[62627]: INFO nova.compute.claims [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: cabc73d8-0304-4a7a-8101-068986ac9c3e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1357.956447] env[62627]: DEBUG oslo_concurrency.lockutils [None req-26af1cb3-a686-4433-b81a-6fb6321a1369 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lock "6c4ebb14-c3a3-485c-bbb3-405a8b7f92db" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.811s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1358.565549] env[62627]: DEBUG nova.compute.manager [req-fe583605-38b7-4e38-b73a-059397d8529d req-0ea76dd7-df9a-4d2b-a282-9190ce2d47d1 service nova] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Received event network-changed-e8d180db-2d86-4c84-b9dc-877a37a56877 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1358.565816] env[62627]: DEBUG nova.compute.manager [req-fe583605-38b7-4e38-b73a-059397d8529d req-0ea76dd7-df9a-4d2b-a282-9190ce2d47d1 service nova] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Refreshing instance network info cache due to event network-changed-e8d180db-2d86-4c84-b9dc-877a37a56877. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1358.565926] env[62627]: DEBUG oslo_concurrency.lockutils [req-fe583605-38b7-4e38-b73a-059397d8529d req-0ea76dd7-df9a-4d2b-a282-9190ce2d47d1 service nova] Acquiring lock "refresh_cache-6c4ebb14-c3a3-485c-bbb3-405a8b7f92db" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1358.566087] env[62627]: DEBUG oslo_concurrency.lockutils [req-fe583605-38b7-4e38-b73a-059397d8529d req-0ea76dd7-df9a-4d2b-a282-9190ce2d47d1 service nova] Acquired lock "refresh_cache-6c4ebb14-c3a3-485c-bbb3-405a8b7f92db" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1358.566258] env[62627]: DEBUG nova.network.neutron [req-fe583605-38b7-4e38-b73a-059397d8529d req-0ea76dd7-df9a-4d2b-a282-9190ce2d47d1 service nova] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Refreshing network info cache for port e8d180db-2d86-4c84-b9dc-877a37a56877 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1358.708220] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6892cc4d-46ec-4acb-9d70-9db87b718ac8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.715873] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1cb3096-258b-4274-b524-a2934e9232c8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.748605] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccb0d342-7063-4ab5-b358-ed61ee042773 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.755945] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fb5db9d-407b-44ae-a851-c5f734059d80 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.769074] env[62627]: DEBUG nova.compute.provider_tree [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1359.272019] env[62627]: DEBUG nova.scheduler.client.report [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1359.302035] env[62627]: DEBUG nova.network.neutron [req-fe583605-38b7-4e38-b73a-059397d8529d req-0ea76dd7-df9a-4d2b-a282-9190ce2d47d1 service nova] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Updated VIF entry in instance network info cache for 
port e8d180db-2d86-4c84-b9dc-877a37a56877. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1359.302393] env[62627]: DEBUG nova.network.neutron [req-fe583605-38b7-4e38-b73a-059397d8529d req-0ea76dd7-df9a-4d2b-a282-9190ce2d47d1 service nova] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Updating instance_info_cache with network_info: [{"id": "e8d180db-2d86-4c84-b9dc-877a37a56877", "address": "fa:16:3e:6d:92:92", "network": {"id": "76eae841-5dda-4384-92ba-55247808e11f", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1004379592-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.244", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f41b0c171180469e9092b8a8da697a78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8d180db-2d", "ovs_interfaceid": "e8d180db-2d86-4c84-b9dc-877a37a56877", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1359.412781] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0e1e030e-4bf3-4d94-81a5-e865df342d9a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquiring lock "02b9bb2d-5752-4c09-9661-5966c2ac1bad" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1359.413164] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0e1e030e-4bf3-4d94-81a5-e865df342d9a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "02b9bb2d-5752-4c09-9661-5966c2ac1bad" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1359.413464] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0e1e030e-4bf3-4d94-81a5-e865df342d9a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquiring lock "02b9bb2d-5752-4c09-9661-5966c2ac1bad-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1359.413681] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0e1e030e-4bf3-4d94-81a5-e865df342d9a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "02b9bb2d-5752-4c09-9661-5966c2ac1bad-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1359.413966] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0e1e030e-4bf3-4d94-81a5-e865df342d9a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "02b9bb2d-5752-4c09-9661-5966c2ac1bad-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1359.416490] env[62627]: INFO nova.compute.manager [None req-0e1e030e-4bf3-4d94-81a5-e865df342d9a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Terminating instance [ 1359.777075] env[62627]: DEBUG oslo_concurrency.lockutils [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.186s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1359.777654] env[62627]: DEBUG nova.compute.manager [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: cabc73d8-0304-4a7a-8101-068986ac9c3e] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1359.804542] env[62627]: DEBUG oslo_concurrency.lockutils [req-fe583605-38b7-4e38-b73a-059397d8529d req-0ea76dd7-df9a-4d2b-a282-9190ce2d47d1 service nova] Releasing lock "refresh_cache-6c4ebb14-c3a3-485c-bbb3-405a8b7f92db" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1359.921109] env[62627]: DEBUG nova.compute.manager [None req-0e1e030e-4bf3-4d94-81a5-e865df342d9a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Start destroying the instance on the hypervisor. {{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1359.921109] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e1e030e-4bf3-4d94-81a5-e865df342d9a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1359.921343] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b30cf0f4-f4a1-41ae-8a08-ebc9c0e23bb1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.929555] env[62627]: DEBUG oslo_vmware.api [None req-0e1e030e-4bf3-4d94-81a5-e865df342d9a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1359.929555] env[62627]: value = "task-2195445" [ 1359.929555] env[62627]: _type = "Task" [ 1359.929555] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1359.939176] env[62627]: DEBUG oslo_vmware.api [None req-0e1e030e-4bf3-4d94-81a5-e865df342d9a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195445, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.282481] env[62627]: DEBUG nova.compute.utils [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1360.283972] env[62627]: DEBUG nova.compute.manager [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: cabc73d8-0304-4a7a-8101-068986ac9c3e] Allocating IP information in the background. {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1360.284169] env[62627]: DEBUG nova.network.neutron [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: cabc73d8-0304-4a7a-8101-068986ac9c3e] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1360.328610] env[62627]: DEBUG nova.policy [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd847ab3ec20743d0997defea8008448e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '710e7463994b4ec08bcc6d479cd1138b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 1360.439747] env[62627]: DEBUG oslo_vmware.api [None req-0e1e030e-4bf3-4d94-81a5-e865df342d9a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195445, 'name': PowerOffVM_Task, 'duration_secs': 0.171936} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1360.440087] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e1e030e-4bf3-4d94-81a5-e865df342d9a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1360.440228] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e1e030e-4bf3-4d94-81a5-e865df342d9a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Volume detach. 
Driver type: vmdk {{(pid=62627) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1360.440463] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e1e030e-4bf3-4d94-81a5-e865df342d9a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447830', 'volume_id': '68c69bdc-55ac-48af-b285-e6d2d8dd861d', 'name': 'volume-68c69bdc-55ac-48af-b285-e6d2d8dd861d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '02b9bb2d-5752-4c09-9661-5966c2ac1bad', 'attached_at': '2025-01-30T20:40:50.000000', 'detached_at': '', 'volume_id': '68c69bdc-55ac-48af-b285-e6d2d8dd861d', 'serial': '68c69bdc-55ac-48af-b285-e6d2d8dd861d'} {{(pid=62627) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1360.441238] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cd4c4a4-44ed-457d-88b8-e6a03a582cb4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.459390] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86127296-cd99-4a50-9415-400bd63edb13 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.466156] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fffdaf14-2df6-4aeb-ab2d-d82d8623cbaf {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.483392] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bdc682e-f264-415f-8cc3-165278d011f8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.499230] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e1e030e-4bf3-4d94-81a5-e865df342d9a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] The volume has not been displaced from its original location: [datastore2] volume-68c69bdc-55ac-48af-b285-e6d2d8dd861d/volume-68c69bdc-55ac-48af-b285-e6d2d8dd861d.vmdk. No consolidation needed. 
{{(pid=62627) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1360.504533] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e1e030e-4bf3-4d94-81a5-e865df342d9a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Reconfiguring VM instance instance-0000006d to detach disk 2000 {{(pid=62627) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1360.504805] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1fb29ada-1be5-473d-951e-1c3549dfb9fa {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.522207] env[62627]: DEBUG oslo_vmware.api [None req-0e1e030e-4bf3-4d94-81a5-e865df342d9a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1360.522207] env[62627]: value = "task-2195446" [ 1360.522207] env[62627]: _type = "Task" [ 1360.522207] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1360.530570] env[62627]: DEBUG oslo_vmware.api [None req-0e1e030e-4bf3-4d94-81a5-e865df342d9a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195446, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.592589] env[62627]: DEBUG nova.network.neutron [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: cabc73d8-0304-4a7a-8101-068986ac9c3e] Successfully created port: 2e697b33-f9f6-4771-a3e6-a129565bbd17 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1360.787750] env[62627]: DEBUG nova.compute.manager [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: cabc73d8-0304-4a7a-8101-068986ac9c3e] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1361.031940] env[62627]: DEBUG oslo_vmware.api [None req-0e1e030e-4bf3-4d94-81a5-e865df342d9a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195446, 'name': ReconfigVM_Task, 'duration_secs': 0.199761} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1361.032168] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e1e030e-4bf3-4d94-81a5-e865df342d9a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Reconfigured VM instance instance-0000006d to detach disk 2000 {{(pid=62627) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1361.036733] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d98ca9a7-d47d-4511-a6f5-a4e13aa630a4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.051789] env[62627]: DEBUG oslo_vmware.api [None req-0e1e030e-4bf3-4d94-81a5-e865df342d9a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1361.051789] env[62627]: value = "task-2195447" [ 1361.051789] env[62627]: _type = "Task" [ 1361.051789] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1361.059798] env[62627]: DEBUG oslo_vmware.api [None req-0e1e030e-4bf3-4d94-81a5-e865df342d9a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195447, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.562789] env[62627]: DEBUG oslo_vmware.api [None req-0e1e030e-4bf3-4d94-81a5-e865df342d9a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195447, 'name': ReconfigVM_Task, 'duration_secs': 0.361824} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1361.563071] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e1e030e-4bf3-4d94-81a5-e865df342d9a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447830', 'volume_id': '68c69bdc-55ac-48af-b285-e6d2d8dd861d', 'name': 'volume-68c69bdc-55ac-48af-b285-e6d2d8dd861d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '02b9bb2d-5752-4c09-9661-5966c2ac1bad', 'attached_at': '2025-01-30T20:40:50.000000', 'detached_at': '', 'volume_id': '68c69bdc-55ac-48af-b285-e6d2d8dd861d', 'serial': '68c69bdc-55ac-48af-b285-e6d2d8dd861d'} {{(pid=62627) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1361.563351] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-0e1e030e-4bf3-4d94-81a5-e865df342d9a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1361.564136] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8051505e-67de-400a-aeb9-d3793e180cd5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.570998] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-0e1e030e-4bf3-4d94-81a5-e865df342d9a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1361.571238] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ffa1859c-6647-416a-9fb8-c86fd4f87f76 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.629331] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-0e1e030e-4bf3-4d94-81a5-e865df342d9a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1361.629581] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-0e1e030e-4bf3-4d94-81a5-e865df342d9a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1361.629780] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e1e030e-4bf3-4d94-81a5-e865df342d9a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Deleting the datastore file [datastore2] 02b9bb2d-5752-4c09-9661-5966c2ac1bad {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1361.630063] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5f25e53f-349e-4eba-a10c-03d97d8fc1ea {{(pid=62627) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.636584] env[62627]: DEBUG oslo_vmware.api [None req-0e1e030e-4bf3-4d94-81a5-e865df342d9a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1361.636584] env[62627]: value = "task-2195449" [ 1361.636584] env[62627]: _type = "Task" [ 1361.636584] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1361.644460] env[62627]: DEBUG oslo_vmware.api [None req-0e1e030e-4bf3-4d94-81a5-e865df342d9a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195449, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.796992] env[62627]: DEBUG nova.compute.manager [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: cabc73d8-0304-4a7a-8101-068986ac9c3e] Start spawning the instance on the hypervisor. {{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1361.823611] env[62627]: DEBUG nova.virt.hardware [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1361.823855] env[62627]: DEBUG nova.virt.hardware [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1361.824023] env[62627]: DEBUG nova.virt.hardware [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1361.824213] env[62627]: DEBUG nova.virt.hardware [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1361.824362] env[62627]: DEBUG nova.virt.hardware [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1361.824512] 
env[62627]: DEBUG nova.virt.hardware [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1361.825021] env[62627]: DEBUG nova.virt.hardware [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1361.825021] env[62627]: DEBUG nova.virt.hardware [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1361.825142] env[62627]: DEBUG nova.virt.hardware [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1361.825227] env[62627]: DEBUG nova.virt.hardware [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1361.825400] env[62627]: DEBUG nova.virt.hardware [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1361.826273] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-011d051c-1583-4314-94d8-bbf7270e2767 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.834365] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7588a7d3-2794-48d3-b311-f5b095b8f6d6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.956172] env[62627]: DEBUG nova.compute.manager [req-e47534e3-a178-4a70-968a-3f259b0f1ca5 req-0533fa32-9225-4025-b927-c7b95abb7f5a service nova] [instance: cabc73d8-0304-4a7a-8101-068986ac9c3e] Received event network-vif-plugged-2e697b33-f9f6-4771-a3e6-a129565bbd17 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1361.956368] env[62627]: DEBUG oslo_concurrency.lockutils [req-e47534e3-a178-4a70-968a-3f259b0f1ca5 req-0533fa32-9225-4025-b927-c7b95abb7f5a service nova] Acquiring lock "cabc73d8-0304-4a7a-8101-068986ac9c3e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1361.956577] env[62627]: DEBUG oslo_concurrency.lockutils [req-e47534e3-a178-4a70-968a-3f259b0f1ca5 req-0533fa32-9225-4025-b927-c7b95abb7f5a service nova] Lock "cabc73d8-0304-4a7a-8101-068986ac9c3e-events" acquired by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1361.956746] env[62627]: DEBUG oslo_concurrency.lockutils [req-e47534e3-a178-4a70-968a-3f259b0f1ca5 req-0533fa32-9225-4025-b927-c7b95abb7f5a service nova] Lock "cabc73d8-0304-4a7a-8101-068986ac9c3e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1361.956911] env[62627]: DEBUG nova.compute.manager [req-e47534e3-a178-4a70-968a-3f259b0f1ca5 req-0533fa32-9225-4025-b927-c7b95abb7f5a service nova] [instance: cabc73d8-0304-4a7a-8101-068986ac9c3e] No waiting events found dispatching network-vif-plugged-2e697b33-f9f6-4771-a3e6-a129565bbd17 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1361.957084] env[62627]: WARNING nova.compute.manager [req-e47534e3-a178-4a70-968a-3f259b0f1ca5 req-0533fa32-9225-4025-b927-c7b95abb7f5a service nova] [instance: cabc73d8-0304-4a7a-8101-068986ac9c3e] Received unexpected event network-vif-plugged-2e697b33-f9f6-4771-a3e6-a129565bbd17 for instance with vm_state building and task_state spawning. [ 1362.043875] env[62627]: DEBUG nova.network.neutron [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: cabc73d8-0304-4a7a-8101-068986ac9c3e] Successfully updated port: 2e697b33-f9f6-4771-a3e6-a129565bbd17 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1362.147541] env[62627]: DEBUG oslo_vmware.api [None req-0e1e030e-4bf3-4d94-81a5-e865df342d9a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195449, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.075331} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1362.147794] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e1e030e-4bf3-4d94-81a5-e865df342d9a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1362.147976] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-0e1e030e-4bf3-4d94-81a5-e865df342d9a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1362.148170] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-0e1e030e-4bf3-4d94-81a5-e865df342d9a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1362.148372] env[62627]: INFO nova.compute.manager [None req-0e1e030e-4bf3-4d94-81a5-e865df342d9a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Took 2.23 seconds to destroy the instance on the hypervisor. 
[ 1362.148634] env[62627]: DEBUG oslo.service.loopingcall [None req-0e1e030e-4bf3-4d94-81a5-e865df342d9a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1362.148824] env[62627]: DEBUG nova.compute.manager [-] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1362.148917] env[62627]: DEBUG nova.network.neutron [-] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1362.548792] env[62627]: DEBUG oslo_concurrency.lockutils [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquiring lock "refresh_cache-cabc73d8-0304-4a7a-8101-068986ac9c3e" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1362.548996] env[62627]: DEBUG oslo_concurrency.lockutils [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquired lock "refresh_cache-cabc73d8-0304-4a7a-8101-068986ac9c3e" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1362.549310] env[62627]: DEBUG nova.network.neutron [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: cabc73d8-0304-4a7a-8101-068986ac9c3e] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1363.082523] env[62627]: DEBUG nova.network.neutron [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: cabc73d8-0304-4a7a-8101-068986ac9c3e] Instance cache missing network info. 
{{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1363.138374] env[62627]: DEBUG nova.network.neutron [-] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1363.218920] env[62627]: DEBUG nova.network.neutron [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: cabc73d8-0304-4a7a-8101-068986ac9c3e] Updating instance_info_cache with network_info: [{"id": "2e697b33-f9f6-4771-a3e6-a129565bbd17", "address": "fa:16:3e:d8:33:c7", "network": {"id": "93a120a3-5be9-4822-960a-7dc2d246a740", "bridge": "br-int", "label": "tempest-ServersTestJSON-1051356861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "710e7463994b4ec08bcc6d479cd1138b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49a94c29-ddd5-4383-9219-1c2c3bb09cc5", "external-id": "nsx-vlan-transportzone-2", "segmentation_id": 2, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e697b33-f9", "ovs_interfaceid": "2e697b33-f9f6-4771-a3e6-a129565bbd17", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1363.641175] env[62627]: INFO nova.compute.manager [-] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Took 1.49 seconds to deallocate network for instance. 
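The instance_info_cache entries above are dumped as JSON-like lists of VIF dictionaries. A small, self-contained example of pulling out the commonly needed fields (port ID, MAC, device name, fixed IPs, MTU), with the values copied from the cache update for port 2e697b33-f9f6-4771-a3e6-a129565bbd17; this is plain dictionary access, not Nova's NetworkInfo model:

```python
import json

# Trimmed-down copy of one VIF entry from the cache update above.
network_info = json.loads("""[
  {"id": "2e697b33-f9f6-4771-a3e6-a129565bbd17",
   "address": "fa:16:3e:d8:33:c7",
   "network": {"id": "93a120a3-5be9-4822-960a-7dc2d246a740",
               "bridge": "br-int",
               "subnets": [{"cidr": "192.168.128.0/28",
                            "ips": [{"address": "192.168.128.5",
                                     "type": "fixed",
                                     "floating_ips": []}]}],
               "meta": {"mtu": 8950}},
   "type": "ovs",
   "devname": "tap2e697b33-f9"}
]""")

for vif in network_info:
    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"]]
    print(vif["id"], vif["address"], vif["devname"], fixed_ips,
          "mtu", vif["network"]["meta"]["mtu"])
```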
[ 1363.721720] env[62627]: DEBUG oslo_concurrency.lockutils [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Releasing lock "refresh_cache-cabc73d8-0304-4a7a-8101-068986ac9c3e" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1363.722127] env[62627]: DEBUG nova.compute.manager [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: cabc73d8-0304-4a7a-8101-068986ac9c3e] Instance network_info: |[{"id": "2e697b33-f9f6-4771-a3e6-a129565bbd17", "address": "fa:16:3e:d8:33:c7", "network": {"id": "93a120a3-5be9-4822-960a-7dc2d246a740", "bridge": "br-int", "label": "tempest-ServersTestJSON-1051356861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "710e7463994b4ec08bcc6d479cd1138b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49a94c29-ddd5-4383-9219-1c2c3bb09cc5", "external-id": "nsx-vlan-transportzone-2", "segmentation_id": 2, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e697b33-f9", "ovs_interfaceid": "2e697b33-f9f6-4771-a3e6-a129565bbd17", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1363.722577] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: cabc73d8-0304-4a7a-8101-068986ac9c3e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d8:33:c7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '49a94c29-ddd5-4383-9219-1c2c3bb09cc5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2e697b33-f9f6-4771-a3e6-a129565bbd17', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1363.730653] env[62627]: DEBUG oslo.service.loopingcall [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1363.730925] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cabc73d8-0304-4a7a-8101-068986ac9c3e] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1363.731188] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c512d99b-baf3-4841-be25-5e492d80295d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.756341] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1363.756341] env[62627]: value = "task-2195450" [ 1363.756341] env[62627]: _type = "Task" [ 1363.756341] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1363.764863] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195450, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1363.987089] env[62627]: DEBUG nova.compute.manager [req-b542b8d1-ca9f-4b7d-ba2e-906eb7517114 req-97f8bb6b-f6ba-4468-a0c5-c343b8d6061f service nova] [instance: cabc73d8-0304-4a7a-8101-068986ac9c3e] Received event network-changed-2e697b33-f9f6-4771-a3e6-a129565bbd17 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1363.987267] env[62627]: DEBUG nova.compute.manager [req-b542b8d1-ca9f-4b7d-ba2e-906eb7517114 req-97f8bb6b-f6ba-4468-a0c5-c343b8d6061f service nova] [instance: cabc73d8-0304-4a7a-8101-068986ac9c3e] Refreshing instance network info cache due to event network-changed-2e697b33-f9f6-4771-a3e6-a129565bbd17. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1363.987563] env[62627]: DEBUG oslo_concurrency.lockutils [req-b542b8d1-ca9f-4b7d-ba2e-906eb7517114 req-97f8bb6b-f6ba-4468-a0c5-c343b8d6061f service nova] Acquiring lock "refresh_cache-cabc73d8-0304-4a7a-8101-068986ac9c3e" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1363.987725] env[62627]: DEBUG oslo_concurrency.lockutils [req-b542b8d1-ca9f-4b7d-ba2e-906eb7517114 req-97f8bb6b-f6ba-4468-a0c5-c343b8d6061f service nova] Acquired lock "refresh_cache-cabc73d8-0304-4a7a-8101-068986ac9c3e" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1363.987892] env[62627]: DEBUG nova.network.neutron [req-b542b8d1-ca9f-4b7d-ba2e-906eb7517114 req-97f8bb6b-f6ba-4468-a0c5-c343b8d6061f service nova] [instance: cabc73d8-0304-4a7a-8101-068986ac9c3e] Refreshing network info cache for port 2e697b33-f9f6-4771-a3e6-a129565bbd17 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1364.191819] env[62627]: INFO nova.compute.manager [None req-0e1e030e-4bf3-4d94-81a5-e865df342d9a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Took 0.55 seconds to detach 1 volumes for instance. 
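The 'Received event network-changed-… / Acquiring lock "refresh_cache-…" / Refreshing network info cache' sequence above is Neutron notifying Nova about a port change and Nova rebuilding its per-instance network cache under a per-instance guard. A toy, stdlib-only sketch of that flow; the cache, lock registry, and fetch_port_info callback are made up for illustration and only the shape of the sequence mirrors the log:

```python
import threading

_cache_locks = {}          # per-instance guards ("refresh_cache-<uuid>")
_network_info_cache = {}   # instance uuid -> list of VIF dicts

def _lock_for(instance_uuid):
    return _cache_locks.setdefault(instance_uuid, threading.Lock())

def handle_network_changed(instance_uuid, port_id, fetch_port_info):
    print(f"Received event network-changed-{port_id}")
    with _lock_for(instance_uuid):  # acquire/release around the refresh
        _network_info_cache[instance_uuid] = fetch_port_info(port_id)
    print(f"Updated VIF entry for port {port_id}")

handle_network_changed(
    "cabc73d8-0304-4a7a-8101-068986ac9c3e",
    "2e697b33-f9f6-4771-a3e6-a129565bbd17",
    lambda port: [{"id": port, "address": "fa:16:3e:d8:33:c7"}])
```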
[ 1364.195402] env[62627]: DEBUG nova.compute.manager [None req-0e1e030e-4bf3-4d94-81a5-e865df342d9a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Deleting volume: 68c69bdc-55ac-48af-b285-e6d2d8dd861d {{(pid=62627) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 1364.266392] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195450, 'name': CreateVM_Task, 'duration_secs': 0.384556} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1364.267195] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cabc73d8-0304-4a7a-8101-068986ac9c3e] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1364.267895] env[62627]: DEBUG oslo_concurrency.lockutils [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1364.268076] env[62627]: DEBUG oslo_concurrency.lockutils [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1364.268491] env[62627]: DEBUG oslo_concurrency.lockutils [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1364.268985] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-01f48fd8-d862-4e48-a0f2-9c6cd6468471 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.273827] env[62627]: DEBUG oslo_vmware.api [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1364.273827] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52e8f1ea-4ed0-4339-526d-f08f362d6ed7" [ 1364.273827] env[62627]: _type = "Task" [ 1364.273827] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1364.282264] env[62627]: DEBUG oslo_vmware.api [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52e8f1ea-4ed0-4339-526d-f08f362d6ed7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1364.705349] env[62627]: DEBUG nova.network.neutron [req-b542b8d1-ca9f-4b7d-ba2e-906eb7517114 req-97f8bb6b-f6ba-4468-a0c5-c343b8d6061f service nova] [instance: cabc73d8-0304-4a7a-8101-068986ac9c3e] Updated VIF entry in instance network info cache for port 2e697b33-f9f6-4771-a3e6-a129565bbd17. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1364.705719] env[62627]: DEBUG nova.network.neutron [req-b542b8d1-ca9f-4b7d-ba2e-906eb7517114 req-97f8bb6b-f6ba-4468-a0c5-c343b8d6061f service nova] [instance: cabc73d8-0304-4a7a-8101-068986ac9c3e] Updating instance_info_cache with network_info: [{"id": "2e697b33-f9f6-4771-a3e6-a129565bbd17", "address": "fa:16:3e:d8:33:c7", "network": {"id": "93a120a3-5be9-4822-960a-7dc2d246a740", "bridge": "br-int", "label": "tempest-ServersTestJSON-1051356861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "710e7463994b4ec08bcc6d479cd1138b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49a94c29-ddd5-4383-9219-1c2c3bb09cc5", "external-id": "nsx-vlan-transportzone-2", "segmentation_id": 2, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e697b33-f9", "ovs_interfaceid": "2e697b33-f9f6-4771-a3e6-a129565bbd17", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1364.736223] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0e1e030e-4bf3-4d94-81a5-e865df342d9a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1364.736513] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0e1e030e-4bf3-4d94-81a5-e865df342d9a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1364.736762] env[62627]: DEBUG nova.objects.instance [None req-0e1e030e-4bf3-4d94-81a5-e865df342d9a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lazy-loading 'resources' on Instance uuid 02b9bb2d-5752-4c09-9661-5966c2ac1bad {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1364.784502] env[62627]: DEBUG oslo_vmware.api [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52e8f1ea-4ed0-4339-526d-f08f362d6ed7, 'name': SearchDatastore_Task, 'duration_secs': 0.012515} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1364.784805] env[62627]: DEBUG oslo_concurrency.lockutils [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1364.785077] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: cabc73d8-0304-4a7a-8101-068986ac9c3e] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1364.785332] env[62627]: DEBUG oslo_concurrency.lockutils [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1364.785484] env[62627]: DEBUG oslo_concurrency.lockutils [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1364.785666] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1364.785925] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-33a0832d-1b93-469f-b1f1-827e1958503c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.794529] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1364.794713] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1364.795451] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2421b029-5400-49a6-9548-3dadca405fe3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.802219] env[62627]: DEBUG oslo_vmware.api [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1364.802219] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]525411f6-e755-9384-fbce-a91dfb83be14" [ 1364.802219] env[62627]: _type = "Task" [ 1364.802219] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1364.809777] env[62627]: DEBUG oslo_vmware.api [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]525411f6-e755-9384-fbce-a91dfb83be14, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1365.208162] env[62627]: DEBUG oslo_concurrency.lockutils [req-b542b8d1-ca9f-4b7d-ba2e-906eb7517114 req-97f8bb6b-f6ba-4468-a0c5-c343b8d6061f service nova] Releasing lock "refresh_cache-cabc73d8-0304-4a7a-8101-068986ac9c3e" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1365.208527] env[62627]: DEBUG nova.compute.manager [req-b542b8d1-ca9f-4b7d-ba2e-906eb7517114 req-97f8bb6b-f6ba-4468-a0c5-c343b8d6061f service nova] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Received event network-vif-deleted-213d4e79-8676-47f0-9e73-c3a36df750d1 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1365.313367] env[62627]: DEBUG oslo_vmware.api [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]525411f6-e755-9384-fbce-a91dfb83be14, 'name': SearchDatastore_Task, 'duration_secs': 0.009055} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1365.314191] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-35992036-2ac5-4568-a2bb-338187f96f83 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.320084] env[62627]: DEBUG oslo_vmware.api [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1365.320084] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]524123eb-7e76-7d6b-5a2b-7a12cd7cba13" [ 1365.320084] env[62627]: _type = "Task" [ 1365.320084] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1365.327262] env[62627]: DEBUG oslo_vmware.api [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]524123eb-7e76-7d6b-5a2b-7a12cd7cba13, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1365.348188] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7e242ad-6978-460d-a365-c91359de3600 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.354853] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8f43955-b3af-4256-93e5-7d5e30683b0f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.385824] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9780334c-5506-417c-9cc1-6dcdb2cca21b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.393262] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed2f6b2d-e975-489c-9f74-43c1298e7d9d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.407359] env[62627]: DEBUG nova.compute.provider_tree [None req-0e1e030e-4bf3-4d94-81a5-e865df342d9a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1365.830420] env[62627]: DEBUG oslo_vmware.api [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]524123eb-7e76-7d6b-5a2b-7a12cd7cba13, 'name': SearchDatastore_Task, 'duration_secs': 0.013557} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1365.830675] env[62627]: DEBUG oslo_concurrency.lockutils [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1365.830929] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] cabc73d8-0304-4a7a-8101-068986ac9c3e/cabc73d8-0304-4a7a-8101-068986ac9c3e.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1365.831215] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1215b077-7457-41ed-83a1-e93ccdf09c9b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.838129] env[62627]: DEBUG oslo_vmware.api [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1365.838129] env[62627]: value = "task-2195452" [ 1365.838129] env[62627]: _type = "Task" [ 1365.838129] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1365.846912] env[62627]: DEBUG oslo_vmware.api [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195452, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1365.911235] env[62627]: DEBUG nova.scheduler.client.report [None req-0e1e030e-4bf3-4d94-81a5-e865df342d9a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1366.349930] env[62627]: DEBUG oslo_vmware.api [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195452, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1366.415961] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0e1e030e-4bf3-4d94-81a5-e865df342d9a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.679s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1366.433825] env[62627]: INFO nova.scheduler.client.report [None req-0e1e030e-4bf3-4d94-81a5-e865df342d9a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Deleted allocations for instance 02b9bb2d-5752-4c09-9661-5966c2ac1bad [ 1366.849288] env[62627]: DEBUG oslo_vmware.api [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195452, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.527141} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1366.849532] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] cabc73d8-0304-4a7a-8101-068986ac9c3e/cabc73d8-0304-4a7a-8101-068986ac9c3e.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1366.849718] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: cabc73d8-0304-4a7a-8101-068986ac9c3e] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1366.849971] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b84740f2-fd84-4317-adae-8e2b8011922f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.856117] env[62627]: DEBUG oslo_vmware.api [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1366.856117] env[62627]: value = "task-2195453" [ 1366.856117] env[62627]: _type = "Task" [ 1366.856117] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1366.863764] env[62627]: DEBUG oslo_vmware.api [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195453, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1366.941223] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0e1e030e-4bf3-4d94-81a5-e865df342d9a tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "02b9bb2d-5752-4c09-9661-5966c2ac1bad" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.528s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1367.206056] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f65a8bba-9821-46d1-b0dd-191a291145fa tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquiring lock "7e340c99-8d15-4696-98d5-a952f5d2aeee" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1367.206310] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f65a8bba-9821-46d1-b0dd-191a291145fa tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "7e340c99-8d15-4696-98d5-a952f5d2aeee" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1367.206522] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f65a8bba-9821-46d1-b0dd-191a291145fa tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquiring lock "7e340c99-8d15-4696-98d5-a952f5d2aeee-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1367.206711] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f65a8bba-9821-46d1-b0dd-191a291145fa tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "7e340c99-8d15-4696-98d5-a952f5d2aeee-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1367.206884] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f65a8bba-9821-46d1-b0dd-191a291145fa tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "7e340c99-8d15-4696-98d5-a952f5d2aeee-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1367.209379] env[62627]: INFO nova.compute.manager [None req-f65a8bba-9821-46d1-b0dd-191a291145fa tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 7e340c99-8d15-4696-98d5-a952f5d2aeee] Terminating instance [ 1367.366176] env[62627]: DEBUG oslo_vmware.api [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195453, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.056606} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1367.366524] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: cabc73d8-0304-4a7a-8101-068986ac9c3e] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1367.367245] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad299e62-362d-4931-8ec1-857c1f43565f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.389865] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: cabc73d8-0304-4a7a-8101-068986ac9c3e] Reconfiguring VM instance instance-00000074 to attach disk [datastore2] cabc73d8-0304-4a7a-8101-068986ac9c3e/cabc73d8-0304-4a7a-8101-068986ac9c3e.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1367.390158] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a412d815-76f6-4961-b28e-6bc7a04850a9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.410481] env[62627]: DEBUG oslo_vmware.api [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1367.410481] env[62627]: value = "task-2195454" [ 1367.410481] env[62627]: _type = "Task" [ 1367.410481] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1367.418483] env[62627]: DEBUG oslo_vmware.api [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195454, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1367.713763] env[62627]: DEBUG nova.compute.manager [None req-f65a8bba-9821-46d1-b0dd-191a291145fa tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 7e340c99-8d15-4696-98d5-a952f5d2aeee] Start destroying the instance on the hypervisor. 
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1367.714075] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-f65a8bba-9821-46d1-b0dd-191a291145fa tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 7e340c99-8d15-4696-98d5-a952f5d2aeee] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1367.715008] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a43c7c8-fd5b-408f-9e93-c851c3d5a0c5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.722757] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-f65a8bba-9821-46d1-b0dd-191a291145fa tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 7e340c99-8d15-4696-98d5-a952f5d2aeee] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1367.722998] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6769e3a4-b8ff-4af4-8151-cdf422c62580 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.729056] env[62627]: DEBUG oslo_vmware.api [None req-f65a8bba-9821-46d1-b0dd-191a291145fa tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1367.729056] env[62627]: value = "task-2195455" [ 1367.729056] env[62627]: _type = "Task" [ 1367.729056] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1367.740224] env[62627]: DEBUG oslo_vmware.api [None req-f65a8bba-9821-46d1-b0dd-191a291145fa tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195455, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1367.922673] env[62627]: DEBUG oslo_vmware.api [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195454, 'name': ReconfigVM_Task, 'duration_secs': 0.352244} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1367.922673] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: cabc73d8-0304-4a7a-8101-068986ac9c3e] Reconfigured VM instance instance-00000074 to attach disk [datastore2] cabc73d8-0304-4a7a-8101-068986ac9c3e/cabc73d8-0304-4a7a-8101-068986ac9c3e.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1367.923174] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-46cd4aad-cc38-4c8f-8393-cbf27b254b08 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.930016] env[62627]: DEBUG oslo_vmware.api [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1367.930016] env[62627]: value = "task-2195456" [ 1367.930016] env[62627]: _type = "Task" [ 1367.930016] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1367.938701] env[62627]: DEBUG oslo_vmware.api [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195456, 'name': Rename_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1368.239332] env[62627]: DEBUG oslo_vmware.api [None req-f65a8bba-9821-46d1-b0dd-191a291145fa tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195455, 'name': PowerOffVM_Task, 'duration_secs': 0.166366} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1368.239644] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-f65a8bba-9821-46d1-b0dd-191a291145fa tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 7e340c99-8d15-4696-98d5-a952f5d2aeee] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1368.239820] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-f65a8bba-9821-46d1-b0dd-191a291145fa tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 7e340c99-8d15-4696-98d5-a952f5d2aeee] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1368.240094] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ff6e193e-8176-4e4c-9109-21f8cad6a75d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.297144] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-f65a8bba-9821-46d1-b0dd-191a291145fa tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 7e340c99-8d15-4696-98d5-a952f5d2aeee] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1368.297397] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-f65a8bba-9821-46d1-b0dd-191a291145fa tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 7e340c99-8d15-4696-98d5-a952f5d2aeee] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1368.297571] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-f65a8bba-9821-46d1-b0dd-191a291145fa tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Deleting the datastore file [datastore2] 7e340c99-8d15-4696-98d5-a952f5d2aeee {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1368.297828] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-66ffbc30-7bb1-433c-8e9f-c20d1b85523c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.304792] env[62627]: DEBUG oslo_vmware.api [None req-f65a8bba-9821-46d1-b0dd-191a291145fa tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1368.304792] env[62627]: value = "task-2195458" [ 1368.304792] env[62627]: _type = "Task" [ 1368.304792] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1368.312248] env[62627]: DEBUG oslo_vmware.api [None req-f65a8bba-9821-46d1-b0dd-191a291145fa tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195458, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1368.440521] env[62627]: DEBUG oslo_vmware.api [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195456, 'name': Rename_Task, 'duration_secs': 0.132442} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1368.440830] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: cabc73d8-0304-4a7a-8101-068986ac9c3e] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1368.440986] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f3cec264-0863-4d6d-b34f-3becc37ec510 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.447114] env[62627]: DEBUG oslo_vmware.api [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1368.447114] env[62627]: value = "task-2195459" [ 1368.447114] env[62627]: _type = "Task" [ 1368.447114] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1368.454805] env[62627]: DEBUG oslo_vmware.api [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195459, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1368.815903] env[62627]: DEBUG oslo_vmware.api [None req-f65a8bba-9821-46d1-b0dd-191a291145fa tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195458, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.135371} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1368.816234] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-f65a8bba-9821-46d1-b0dd-191a291145fa tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1368.816470] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-f65a8bba-9821-46d1-b0dd-191a291145fa tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 7e340c99-8d15-4696-98d5-a952f5d2aeee] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1368.816689] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-f65a8bba-9821-46d1-b0dd-191a291145fa tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 7e340c99-8d15-4696-98d5-a952f5d2aeee] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1368.816914] env[62627]: INFO nova.compute.manager [None req-f65a8bba-9821-46d1-b0dd-191a291145fa tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 7e340c99-8d15-4696-98d5-a952f5d2aeee] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1368.817227] env[62627]: DEBUG oslo.service.loopingcall [None req-f65a8bba-9821-46d1-b0dd-191a291145fa tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1368.817470] env[62627]: DEBUG nova.compute.manager [-] [instance: 7e340c99-8d15-4696-98d5-a952f5d2aeee] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1368.817604] env[62627]: DEBUG nova.network.neutron [-] [instance: 7e340c99-8d15-4696-98d5-a952f5d2aeee] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1368.957590] env[62627]: DEBUG oslo_vmware.api [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195459, 'name': PowerOnVM_Task, 'duration_secs': 0.442398} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1368.957863] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: cabc73d8-0304-4a7a-8101-068986ac9c3e] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1368.958078] env[62627]: INFO nova.compute.manager [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: cabc73d8-0304-4a7a-8101-068986ac9c3e] Took 7.16 seconds to spawn the instance on the hypervisor. 
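Every step of the spawn recorded above (SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) follows the same oslo.vmware pattern that produces the "Waiting for the task ... to complete", "progress is N%" and "completed successfully" lines: start the asynchronous vCenter task, then poll it until vCenter reports a result. The sketch below only illustrates that pattern; the vCenter endpoint and credentials are placeholders, the instance UUID is copied from this log for illustration, and this is not a reproduction of Nova's vm_util code.

    from oslo_vmware import api as vmware_api

    # Placeholder endpoint/credentials (normally read from nova.conf [vmware]
    # host_ip / host_username / host_password).
    session = vmware_api.VMwareAPISession(
        'vc1.example.org', 'administrator', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Resolve the VM's managed object reference by instance UUID, using the same
    # SearchIndex.FindAllByUuid call that shows up elsewhere in this log.
    instance_uuid = 'cabc73d8-0304-4a7a-8101-068986ac9c3e'
    vm_refs = session.invoke_api(
        session.vim, 'FindAllByUuid',
        session.vim.service_content.searchIndex,
        uuid=instance_uuid, vmSearch=True, instanceUuid=True)
    vm_ref = vm_refs[0]

    # Start the asynchronous vCenter task and block until it finishes;
    # wait_for_task() is what emits the "progress is N%" / "completed
    # successfully" poll lines seen above.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)

In Nova itself this sequencing lives in nova.virt.vmwareapi.vm_util/vmops and is driven through the session's internal helpers, but the invoke_api + wait_for_task pair is the underlying mechanism behind each task entry in the log.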
[ 1368.958275] env[62627]: DEBUG nova.compute.manager [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: cabc73d8-0304-4a7a-8101-068986ac9c3e] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1368.959053] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-271f90a6-1e6a-4a41-ba7a-40611c59a891 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.073072] env[62627]: DEBUG nova.compute.manager [req-7896a39c-1957-425f-a48e-0fd09a1a3c65 req-29364261-54cd-4b9b-b0ff-6b715d41ff6f service nova] [instance: 7e340c99-8d15-4696-98d5-a952f5d2aeee] Received event network-vif-deleted-2cabec54-95a1-48db-b0ba-596ee7f4671e {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1369.073296] env[62627]: INFO nova.compute.manager [req-7896a39c-1957-425f-a48e-0fd09a1a3c65 req-29364261-54cd-4b9b-b0ff-6b715d41ff6f service nova] [instance: 7e340c99-8d15-4696-98d5-a952f5d2aeee] Neutron deleted interface 2cabec54-95a1-48db-b0ba-596ee7f4671e; detaching it from the instance and deleting it from the info cache [ 1369.073542] env[62627]: DEBUG nova.network.neutron [req-7896a39c-1957-425f-a48e-0fd09a1a3c65 req-29364261-54cd-4b9b-b0ff-6b715d41ff6f service nova] [instance: 7e340c99-8d15-4696-98d5-a952f5d2aeee] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1369.474753] env[62627]: INFO nova.compute.manager [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: cabc73d8-0304-4a7a-8101-068986ac9c3e] Took 11.90 seconds to build instance. [ 1369.558556] env[62627]: DEBUG nova.network.neutron [-] [instance: 7e340c99-8d15-4696-98d5-a952f5d2aeee] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1369.575889] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-80f27df7-a4a1-4f1d-bbfd-3051de706c79 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.586346] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e12ee70a-8906-4569-9c3b-d41c2d4c7be4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.616009] env[62627]: DEBUG nova.compute.manager [req-7896a39c-1957-425f-a48e-0fd09a1a3c65 req-29364261-54cd-4b9b-b0ff-6b715d41ff6f service nova] [instance: 7e340c99-8d15-4696-98d5-a952f5d2aeee] Detach interface failed, port_id=2cabec54-95a1-48db-b0ba-596ee7f4671e, reason: Instance 7e340c99-8d15-4696-98d5-a952f5d2aeee could not be found. 
{{(pid=62627) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1369.976813] env[62627]: DEBUG oslo_concurrency.lockutils [None req-129f2c74-f226-43a6-bac9-3c855b5bdc04 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "cabc73d8-0304-4a7a-8101-068986ac9c3e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.411s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1370.061055] env[62627]: INFO nova.compute.manager [-] [instance: 7e340c99-8d15-4696-98d5-a952f5d2aeee] Took 1.24 seconds to deallocate network for instance. [ 1370.148412] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8609f690-0c4d-4391-8230-6756f8fa7580 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquiring lock "cabc73d8-0304-4a7a-8101-068986ac9c3e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1370.148699] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8609f690-0c4d-4391-8230-6756f8fa7580 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "cabc73d8-0304-4a7a-8101-068986ac9c3e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1370.148901] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8609f690-0c4d-4391-8230-6756f8fa7580 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquiring lock "cabc73d8-0304-4a7a-8101-068986ac9c3e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1370.149102] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8609f690-0c4d-4391-8230-6756f8fa7580 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "cabc73d8-0304-4a7a-8101-068986ac9c3e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1370.149279] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8609f690-0c4d-4391-8230-6756f8fa7580 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "cabc73d8-0304-4a7a-8101-068986ac9c3e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1370.151531] env[62627]: INFO nova.compute.manager [None req-8609f690-0c4d-4391-8230-6756f8fa7580 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: cabc73d8-0304-4a7a-8101-068986ac9c3e] Terminating instance [ 1370.567302] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f65a8bba-9821-46d1-b0dd-191a291145fa tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1370.567685] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f65a8bba-9821-46d1-b0dd-191a291145fa tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1370.567848] env[62627]: DEBUG nova.objects.instance [None req-f65a8bba-9821-46d1-b0dd-191a291145fa tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lazy-loading 'resources' on Instance uuid 7e340c99-8d15-4696-98d5-a952f5d2aeee {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1370.655618] env[62627]: DEBUG nova.compute.manager [None req-8609f690-0c4d-4391-8230-6756f8fa7580 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: cabc73d8-0304-4a7a-8101-068986ac9c3e] Start destroying the instance on the hypervisor. {{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1370.655827] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8609f690-0c4d-4391-8230-6756f8fa7580 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: cabc73d8-0304-4a7a-8101-068986ac9c3e] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1370.656730] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee8a133e-1a3a-4624-ae82-3577dcf001a1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.664474] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-8609f690-0c4d-4391-8230-6756f8fa7580 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: cabc73d8-0304-4a7a-8101-068986ac9c3e] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1370.664734] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b74607dd-cc44-476a-9f53-6d8b110b3d75 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.673157] env[62627]: DEBUG oslo_vmware.api [None req-8609f690-0c4d-4391-8230-6756f8fa7580 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1370.673157] env[62627]: value = "task-2195460" [ 1370.673157] env[62627]: _type = "Task" [ 1370.673157] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1370.682094] env[62627]: DEBUG oslo_vmware.api [None req-8609f690-0c4d-4391-8230-6756f8fa7580 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195460, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1371.170021] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9d6f519-cb4e-4e7e-85f2-68aebfc5dad0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.180203] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de50936d-17b0-4f98-9104-ed23e281baa1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.186094] env[62627]: DEBUG oslo_vmware.api [None req-8609f690-0c4d-4391-8230-6756f8fa7580 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195460, 'name': PowerOffVM_Task, 'duration_secs': 0.200083} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1371.186607] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-8609f690-0c4d-4391-8230-6756f8fa7580 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: cabc73d8-0304-4a7a-8101-068986ac9c3e] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1371.186852] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8609f690-0c4d-4391-8230-6756f8fa7580 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: cabc73d8-0304-4a7a-8101-068986ac9c3e] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1371.187104] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-82657a4a-57e4-4957-a4ab-b43df9c8eaf7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.213536] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73d9b99c-a41c-4923-910a-7f694de62845 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.222602] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab647c9d-88e8-4ebf-815e-0ae075133832 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.238371] env[62627]: DEBUG nova.compute.provider_tree [None req-f65a8bba-9821-46d1-b0dd-191a291145fa tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1371.249866] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8609f690-0c4d-4391-8230-6756f8fa7580 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: cabc73d8-0304-4a7a-8101-068986ac9c3e] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1371.250083] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8609f690-0c4d-4391-8230-6756f8fa7580 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: cabc73d8-0304-4a7a-8101-068986ac9c3e] Deleting contents of the VM from datastore datastore2 
{{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1371.250265] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-8609f690-0c4d-4391-8230-6756f8fa7580 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Deleting the datastore file [datastore2] cabc73d8-0304-4a7a-8101-068986ac9c3e {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1371.250500] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bfe78fcc-bb38-4f31-a8c9-8a8504e9149a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.256834] env[62627]: DEBUG oslo_vmware.api [None req-8609f690-0c4d-4391-8230-6756f8fa7580 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1371.256834] env[62627]: value = "task-2195462" [ 1371.256834] env[62627]: _type = "Task" [ 1371.256834] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1371.264575] env[62627]: DEBUG oslo_vmware.api [None req-8609f690-0c4d-4391-8230-6756f8fa7580 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195462, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1371.741147] env[62627]: DEBUG nova.scheduler.client.report [None req-f65a8bba-9821-46d1-b0dd-191a291145fa tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1371.766158] env[62627]: DEBUG oslo_vmware.api [None req-8609f690-0c4d-4391-8230-6756f8fa7580 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195462, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.14924} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1371.766414] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-8609f690-0c4d-4391-8230-6756f8fa7580 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1371.766629] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8609f690-0c4d-4391-8230-6756f8fa7580 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: cabc73d8-0304-4a7a-8101-068986ac9c3e] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1371.766816] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-8609f690-0c4d-4391-8230-6756f8fa7580 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: cabc73d8-0304-4a7a-8101-068986ac9c3e] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1371.766990] env[62627]: INFO nova.compute.manager [None req-8609f690-0c4d-4391-8230-6756f8fa7580 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: cabc73d8-0304-4a7a-8101-068986ac9c3e] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1371.767241] env[62627]: DEBUG oslo.service.loopingcall [None req-8609f690-0c4d-4391-8230-6756f8fa7580 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1371.767426] env[62627]: DEBUG nova.compute.manager [-] [instance: cabc73d8-0304-4a7a-8101-068986ac9c3e] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1371.767519] env[62627]: DEBUG nova.network.neutron [-] [instance: cabc73d8-0304-4a7a-8101-068986ac9c3e] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1372.068444] env[62627]: DEBUG nova.compute.manager [req-dca2a52e-befc-476d-a0d6-c4ae0de5983b req-960dd3f5-3586-4657-ac2e-8fe9d6d975a4 service nova] [instance: cabc73d8-0304-4a7a-8101-068986ac9c3e] Received event network-vif-deleted-2e697b33-f9f6-4771-a3e6-a129565bbd17 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1372.068650] env[62627]: INFO nova.compute.manager [req-dca2a52e-befc-476d-a0d6-c4ae0de5983b req-960dd3f5-3586-4657-ac2e-8fe9d6d975a4 service nova] [instance: cabc73d8-0304-4a7a-8101-068986ac9c3e] Neutron deleted interface 2e697b33-f9f6-4771-a3e6-a129565bbd17; detaching it from the instance and deleting it from the info cache [ 1372.069322] env[62627]: DEBUG nova.network.neutron [req-dca2a52e-befc-476d-a0d6-c4ae0de5983b req-960dd3f5-3586-4657-ac2e-8fe9d6d975a4 service nova] [instance: cabc73d8-0304-4a7a-8101-068986ac9c3e] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1372.245687] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f65a8bba-9821-46d1-b0dd-191a291145fa tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.678s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1372.267094] env[62627]: INFO nova.scheduler.client.report [None req-f65a8bba-9821-46d1-b0dd-191a291145fa tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Deleted allocations for instance 7e340c99-8d15-4696-98d5-a952f5d2aeee [ 1372.550975] env[62627]: DEBUG nova.network.neutron [-] [instance: cabc73d8-0304-4a7a-8101-068986ac9c3e] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1372.570903] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-786e8de9-27ba-45b5-96cd-ff85e874f935 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.580860] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaf8506a-4203-406c-9cfc-bc8c4ea42094 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.609370] env[62627]: DEBUG nova.compute.manager [req-dca2a52e-befc-476d-a0d6-c4ae0de5983b req-960dd3f5-3586-4657-ac2e-8fe9d6d975a4 service nova] [instance: cabc73d8-0304-4a7a-8101-068986ac9c3e] Detach interface failed, port_id=2e697b33-f9f6-4771-a3e6-a129565bbd17, reason: Instance cabc73d8-0304-4a7a-8101-068986ac9c3e could not be found. {{(pid=62627) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1372.774130] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f65a8bba-9821-46d1-b0dd-191a291145fa tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "7e340c99-8d15-4696-98d5-a952f5d2aeee" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.568s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1373.053920] env[62627]: INFO nova.compute.manager [-] [instance: cabc73d8-0304-4a7a-8101-068986ac9c3e] Took 1.29 seconds to deallocate network for instance. 
[ 1373.560634] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8609f690-0c4d-4391-8230-6756f8fa7580 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1373.560921] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8609f690-0c4d-4391-8230-6756f8fa7580 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1373.561167] env[62627]: DEBUG nova.objects.instance [None req-8609f690-0c4d-4391-8230-6756f8fa7580 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lazy-loading 'resources' on Instance uuid cabc73d8-0304-4a7a-8101-068986ac9c3e {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1374.150101] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92040e26-2647-4236-a063-1168aa6bfdf0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.157683] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b3f1543-fcc6-4e5c-8da0-0dd48aa16034 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.186314] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b6e8fa7-5793-4acc-b346-f90401272529 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.193682] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a962453f-ce33-411f-ab5f-4017f7bade90 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.206306] env[62627]: DEBUG nova.compute.provider_tree [None req-8609f690-0c4d-4391-8230-6756f8fa7580 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1374.709742] env[62627]: DEBUG nova.scheduler.client.report [None req-8609f690-0c4d-4391-8230-6756f8fa7580 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1375.214965] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8609f690-0c4d-4391-8230-6756f8fa7580 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock 
"compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.654s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1375.236029] env[62627]: INFO nova.scheduler.client.report [None req-8609f690-0c4d-4391-8230-6756f8fa7580 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Deleted allocations for instance cabc73d8-0304-4a7a-8101-068986ac9c3e [ 1375.742831] env[62627]: DEBUG oslo_concurrency.lockutils [None req-8609f690-0c4d-4391-8230-6756f8fa7580 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "cabc73d8-0304-4a7a-8101-068986ac9c3e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.594s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1376.733053] env[62627]: DEBUG oslo_concurrency.lockutils [None req-41f043ef-4c62-4218-85ae-46a466215994 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquiring lock "3b1e8b81-7d18-498a-b35e-8ab1c87374c8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1376.733053] env[62627]: DEBUG oslo_concurrency.lockutils [None req-41f043ef-4c62-4218-85ae-46a466215994 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "3b1e8b81-7d18-498a-b35e-8ab1c87374c8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1376.733469] env[62627]: DEBUG oslo_concurrency.lockutils [None req-41f043ef-4c62-4218-85ae-46a466215994 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquiring lock "3b1e8b81-7d18-498a-b35e-8ab1c87374c8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1376.733469] env[62627]: DEBUG oslo_concurrency.lockutils [None req-41f043ef-4c62-4218-85ae-46a466215994 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "3b1e8b81-7d18-498a-b35e-8ab1c87374c8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1376.733546] env[62627]: DEBUG oslo_concurrency.lockutils [None req-41f043ef-4c62-4218-85ae-46a466215994 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "3b1e8b81-7d18-498a-b35e-8ab1c87374c8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1376.735469] env[62627]: INFO nova.compute.manager [None req-41f043ef-4c62-4218-85ae-46a466215994 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 3b1e8b81-7d18-498a-b35e-8ab1c87374c8] Terminating instance [ 1376.936867] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3bee67f9-277d-4c17-8d37-28a03c181e1b 
tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquiring lock "6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1376.937174] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3bee67f9-277d-4c17-8d37-28a03c181e1b tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1376.937768] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3bee67f9-277d-4c17-8d37-28a03c181e1b tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquiring lock "6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1376.937976] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3bee67f9-277d-4c17-8d37-28a03c181e1b tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1376.938169] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3bee67f9-277d-4c17-8d37-28a03c181e1b tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1376.940102] env[62627]: INFO nova.compute.manager [None req-3bee67f9-277d-4c17-8d37-28a03c181e1b tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] Terminating instance [ 1377.239213] env[62627]: DEBUG nova.compute.manager [None req-41f043ef-4c62-4218-85ae-46a466215994 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 3b1e8b81-7d18-498a-b35e-8ab1c87374c8] Start destroying the instance on the hypervisor. 
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1377.239490] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-41f043ef-4c62-4218-85ae-46a466215994 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 3b1e8b81-7d18-498a-b35e-8ab1c87374c8] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1377.240406] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f99e47c-423d-4e83-9923-50916436fa4e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.248228] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-41f043ef-4c62-4218-85ae-46a466215994 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 3b1e8b81-7d18-498a-b35e-8ab1c87374c8] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1377.248455] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-48d31fa7-17ef-400a-acbf-f1f92ea3e870 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.254768] env[62627]: DEBUG oslo_vmware.api [None req-41f043ef-4c62-4218-85ae-46a466215994 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1377.254768] env[62627]: value = "task-2195464" [ 1377.254768] env[62627]: _type = "Task" [ 1377.254768] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.262410] env[62627]: DEBUG oslo_vmware.api [None req-41f043ef-4c62-4218-85ae-46a466215994 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195464, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.443658] env[62627]: DEBUG nova.compute.manager [None req-3bee67f9-277d-4c17-8d37-28a03c181e1b tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] Start destroying the instance on the hypervisor. 
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1377.444495] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-3bee67f9-277d-4c17-8d37-28a03c181e1b tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1377.444924] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d5561d8-e229-465b-8ce5-dd94f2ffb8cd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.452348] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-3bee67f9-277d-4c17-8d37-28a03c181e1b tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1377.452580] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-de8e3030-975f-4607-8fa7-a67c5052a536 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.458901] env[62627]: DEBUG oslo_vmware.api [None req-3bee67f9-277d-4c17-8d37-28a03c181e1b tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1377.458901] env[62627]: value = "task-2195465" [ 1377.458901] env[62627]: _type = "Task" [ 1377.458901] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.466757] env[62627]: DEBUG oslo_vmware.api [None req-3bee67f9-277d-4c17-8d37-28a03c181e1b tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195465, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.765789] env[62627]: DEBUG oslo_vmware.api [None req-41f043ef-4c62-4218-85ae-46a466215994 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195464, 'name': PowerOffVM_Task, 'duration_secs': 0.177109} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1377.766143] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-41f043ef-4c62-4218-85ae-46a466215994 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 3b1e8b81-7d18-498a-b35e-8ab1c87374c8] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1377.766269] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-41f043ef-4c62-4218-85ae-46a466215994 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 3b1e8b81-7d18-498a-b35e-8ab1c87374c8] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1377.766517] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e047fd04-3528-4dcb-8ba8-0b9d6a207dfd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.828866] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-41f043ef-4c62-4218-85ae-46a466215994 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 3b1e8b81-7d18-498a-b35e-8ab1c87374c8] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1377.829092] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-41f043ef-4c62-4218-85ae-46a466215994 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 3b1e8b81-7d18-498a-b35e-8ab1c87374c8] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1377.829284] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-41f043ef-4c62-4218-85ae-46a466215994 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Deleting the datastore file [datastore2] 3b1e8b81-7d18-498a-b35e-8ab1c87374c8 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1377.829599] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dfb6ca4f-6119-435d-9dcd-cea9950acc6c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.836355] env[62627]: DEBUG oslo_vmware.api [None req-41f043ef-4c62-4218-85ae-46a466215994 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1377.836355] env[62627]: value = "task-2195467" [ 1377.836355] env[62627]: _type = "Task" [ 1377.836355] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.843863] env[62627]: DEBUG oslo_vmware.api [None req-41f043ef-4c62-4218-85ae-46a466215994 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195467, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.968481] env[62627]: DEBUG oslo_vmware.api [None req-3bee67f9-277d-4c17-8d37-28a03c181e1b tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195465, 'name': PowerOffVM_Task, 'duration_secs': 0.179306} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1377.968759] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-3bee67f9-277d-4c17-8d37-28a03c181e1b tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1377.968932] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-3bee67f9-277d-4c17-8d37-28a03c181e1b tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1377.969203] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-824f2fb7-6aa7-49af-a569-a293ccabdcaf {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.030390] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-3bee67f9-277d-4c17-8d37-28a03c181e1b tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1378.030593] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-3bee67f9-277d-4c17-8d37-28a03c181e1b tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1378.030769] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-3bee67f9-277d-4c17-8d37-28a03c181e1b tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Deleting the datastore file [datastore2] 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1378.031047] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6ef5af1c-bd6f-4f2a-9512-d5e16f2391b1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.038346] env[62627]: DEBUG oslo_vmware.api [None req-3bee67f9-277d-4c17-8d37-28a03c181e1b tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for the task: (returnval){ [ 1378.038346] env[62627]: value = "task-2195469" [ 1378.038346] env[62627]: _type = "Task" [ 1378.038346] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1378.046511] env[62627]: DEBUG oslo_vmware.api [None req-3bee67f9-277d-4c17-8d37-28a03c181e1b tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195469, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.346290] env[62627]: DEBUG oslo_vmware.api [None req-41f043ef-4c62-4218-85ae-46a466215994 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195467, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.140499} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1378.346536] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-41f043ef-4c62-4218-85ae-46a466215994 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1378.346724] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-41f043ef-4c62-4218-85ae-46a466215994 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 3b1e8b81-7d18-498a-b35e-8ab1c87374c8] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1378.346903] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-41f043ef-4c62-4218-85ae-46a466215994 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 3b1e8b81-7d18-498a-b35e-8ab1c87374c8] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1378.347090] env[62627]: INFO nova.compute.manager [None req-41f043ef-4c62-4218-85ae-46a466215994 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 3b1e8b81-7d18-498a-b35e-8ab1c87374c8] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1378.347330] env[62627]: DEBUG oslo.service.loopingcall [None req-41f043ef-4c62-4218-85ae-46a466215994 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1378.347525] env[62627]: DEBUG nova.compute.manager [-] [instance: 3b1e8b81-7d18-498a-b35e-8ab1c87374c8] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1378.347617] env[62627]: DEBUG nova.network.neutron [-] [instance: 3b1e8b81-7d18-498a-b35e-8ab1c87374c8] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1378.548862] env[62627]: DEBUG oslo_vmware.api [None req-3bee67f9-277d-4c17-8d37-28a03c181e1b tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Task: {'id': task-2195469, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.125821} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1378.549527] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-3bee67f9-277d-4c17-8d37-28a03c181e1b tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1378.549741] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-3bee67f9-277d-4c17-8d37-28a03c181e1b tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1378.549931] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-3bee67f9-277d-4c17-8d37-28a03c181e1b tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1378.550166] env[62627]: INFO nova.compute.manager [None req-3bee67f9-277d-4c17-8d37-28a03c181e1b tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1378.550514] env[62627]: DEBUG oslo.service.loopingcall [None req-3bee67f9-277d-4c17-8d37-28a03c181e1b tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1378.550727] env[62627]: DEBUG nova.compute.manager [-] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1378.550823] env[62627]: DEBUG nova.network.neutron [-] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1378.596241] env[62627]: DEBUG nova.compute.manager [req-b4a80e97-60cd-40b4-86af-f8b5c5f1e06f req-96cd45ed-d8db-41a3-b88a-1dc7faf1f9ab service nova] [instance: 3b1e8b81-7d18-498a-b35e-8ab1c87374c8] Received event network-vif-deleted-f488b8ce-9270-489a-862c-8166c83c1bdc {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1378.596241] env[62627]: INFO nova.compute.manager [req-b4a80e97-60cd-40b4-86af-f8b5c5f1e06f req-96cd45ed-d8db-41a3-b88a-1dc7faf1f9ab service nova] [instance: 3b1e8b81-7d18-498a-b35e-8ab1c87374c8] Neutron deleted interface f488b8ce-9270-489a-862c-8166c83c1bdc; detaching it from the instance and deleting it from the info cache [ 1378.596241] env[62627]: DEBUG nova.network.neutron [req-b4a80e97-60cd-40b4-86af-f8b5c5f1e06f req-96cd45ed-d8db-41a3-b88a-1dc7faf1f9ab service nova] [instance: 3b1e8b81-7d18-498a-b35e-8ab1c87374c8] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1379.060797] env[62627]: DEBUG nova.compute.manager [req-147d2edb-40c6-4d9e-8f10-3f1c461f443b req-74ad3301-cfe5-4b71-a7ac-3e46143edcc4 service nova] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] Received event network-vif-deleted-b0e730ac-28a5-40f0-bbe1-c4f48f060624 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1379.061061] env[62627]: INFO nova.compute.manager [req-147d2edb-40c6-4d9e-8f10-3f1c461f443b req-74ad3301-cfe5-4b71-a7ac-3e46143edcc4 service nova] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] Neutron deleted interface b0e730ac-28a5-40f0-bbe1-c4f48f060624; detaching it from the instance and deleting it from the info cache [ 1379.061354] env[62627]: DEBUG nova.network.neutron [req-147d2edb-40c6-4d9e-8f10-3f1c461f443b req-74ad3301-cfe5-4b71-a7ac-3e46143edcc4 service nova] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1379.078815] env[62627]: DEBUG nova.network.neutron [-] [instance: 3b1e8b81-7d18-498a-b35e-8ab1c87374c8] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1379.099147] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-703da3ba-0fb3-49e6-ab38-f5d5c5d5abfb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.109503] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dae5fb48-9d02-43ee-8d89-b4191b78278f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.135107] env[62627]: DEBUG nova.compute.manager [req-b4a80e97-60cd-40b4-86af-f8b5c5f1e06f req-96cd45ed-d8db-41a3-b88a-1dc7faf1f9ab service nova] 
[instance: 3b1e8b81-7d18-498a-b35e-8ab1c87374c8] Detach interface failed, port_id=f488b8ce-9270-489a-862c-8166c83c1bdc, reason: Instance 3b1e8b81-7d18-498a-b35e-8ab1c87374c8 could not be found. {{(pid=62627) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1379.519024] env[62627]: DEBUG nova.network.neutron [-] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1379.564091] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6ff8786a-a558-4790-9c40-82dbfb97b4f1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.574193] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2974ecfe-315b-4ffc-9684-5c2907d91ef4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.584698] env[62627]: INFO nova.compute.manager [-] [instance: 3b1e8b81-7d18-498a-b35e-8ab1c87374c8] Took 1.24 seconds to deallocate network for instance. [ 1379.601666] env[62627]: DEBUG nova.compute.manager [req-147d2edb-40c6-4d9e-8f10-3f1c461f443b req-74ad3301-cfe5-4b71-a7ac-3e46143edcc4 service nova] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] Detach interface failed, port_id=b0e730ac-28a5-40f0-bbe1-c4f48f060624, reason: Instance 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a could not be found. {{(pid=62627) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1380.021721] env[62627]: INFO nova.compute.manager [-] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] Took 1.47 seconds to deallocate network for instance. 
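Illustrative sketch (assumed, not part of the captured log): the PowerOffVM_Task and DeleteDatastoreFile_Task entries above follow oslo.vmware's invoke-and-poll pattern, where "Invoking ..." marks the SOAP call, "Waiting for the task ... to complete" starts the poll loop, and the "progress is N%" lines repeat until the task reports "completed successfully". The minimal Python sketch below uses placeholder vCenter credentials and reuses one instance UUID from the log purely for illustration; it is not the Nova driver code itself.

    # Minimal sketch of the oslo.vmware invoke/poll pattern seen above.
    # Host, credentials and polling values are placeholders.
    from oslo_vmware import api

    session = api.VMwareAPISession(
        'vcenter.example.test', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Locate the VM by instance UUID, as the SearchIndex.FindAllByUuid
    # calls above do, then power it off and poll the returned task.
    vm_refs = session.invoke_api(
        session.vim, 'FindAllByUuid',
        session.vim.service_content.searchIndex,
        uuid='3b1e8b81-7d18-498a-b35e-8ab1c87374c8',
        vmSearch=True, instanceUuid=True)
    if vm_refs:
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_refs[0])
        # wait_for_task() drives the "progress is N%" DEBUG lines and
        # returns once the task result is reported as successful.
        session.wait_for_task(task)
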
[ 1380.093047] env[62627]: DEBUG oslo_concurrency.lockutils [None req-41f043ef-4c62-4218-85ae-46a466215994 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1380.093047] env[62627]: DEBUG oslo_concurrency.lockutils [None req-41f043ef-4c62-4218-85ae-46a466215994 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1380.093047] env[62627]: DEBUG nova.objects.instance [None req-41f043ef-4c62-4218-85ae-46a466215994 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lazy-loading 'resources' on Instance uuid 3b1e8b81-7d18-498a-b35e-8ab1c87374c8 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1380.528516] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3bee67f9-277d-4c17-8d37-28a03c181e1b tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1380.677164] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c2935d5-5f46-462b-b72c-acba0800e939 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.687500] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5959a6e8-41b5-4aa5-aeca-d52e690b0a57 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.719074] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8549a74e-7952-44e2-b5d2-e02ad2ab374d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.726255] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6bdbd3f-17ee-48b2-b38c-604fcb6f32a6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.739220] env[62627]: DEBUG nova.compute.provider_tree [None req-41f043ef-4c62-4218-85ae-46a466215994 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1381.242092] env[62627]: DEBUG nova.scheduler.client.report [None req-41f043ef-4c62-4218-85ae-46a466215994 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 
'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1381.748945] env[62627]: DEBUG oslo_concurrency.lockutils [None req-41f043ef-4c62-4218-85ae-46a466215994 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.656s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1381.751674] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3bee67f9-277d-4c17-8d37-28a03c181e1b tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.223s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1381.751989] env[62627]: DEBUG nova.objects.instance [None req-3bee67f9-277d-4c17-8d37-28a03c181e1b tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lazy-loading 'resources' on Instance uuid 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1381.766529] env[62627]: INFO nova.scheduler.client.report [None req-41f043ef-4c62-4218-85ae-46a466215994 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Deleted allocations for instance 3b1e8b81-7d18-498a-b35e-8ab1c87374c8 [ 1382.273098] env[62627]: DEBUG oslo_concurrency.lockutils [None req-41f043ef-4c62-4218-85ae-46a466215994 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "3b1e8b81-7d18-498a-b35e-8ab1c87374c8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.540s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1382.320107] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd3b6151-717b-4aa3-9c27-f2fb12d3924d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.328084] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c136a03-be11-497a-a6ba-0751fd8deffa {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.357976] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ba2fade-9d49-472e-ae66-904f9d27be48 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.365069] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1a1f0c7-f4e8-4f1c-8d87-5bfb4c5f87a9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.378177] env[62627]: DEBUG nova.compute.provider_tree [None req-3bee67f9-277d-4c17-8d37-28a03c181e1b tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1382.881576] env[62627]: DEBUG nova.scheduler.client.report [None req-3bee67f9-277d-4c17-8d37-28a03c181e1b tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1383.386883] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3bee67f9-277d-4c17-8d37-28a03c181e1b tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.635s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1383.404481] env[62627]: INFO nova.scheduler.client.report [None req-3bee67f9-277d-4c17-8d37-28a03c181e1b tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Deleted allocations for instance 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a [ 1383.696336] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquiring lock "c90dcd2f-c718-41b2-912f-00fa0fe69b68" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1383.696579] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "c90dcd2f-c718-41b2-912f-00fa0fe69b68" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1383.911849] env[62627]: DEBUG oslo_concurrency.lockutils [None req-3bee67f9-277d-4c17-8d37-28a03c181e1b tempest-ServerActionsTestOtherA-1289442600 tempest-ServerActionsTestOtherA-1289442600-project-member] Lock "6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.975s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1384.198920] env[62627]: DEBUG nova.compute.manager [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: c90dcd2f-c718-41b2-912f-00fa0fe69b68] Starting instance... 
{{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1384.717560] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1384.720098] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1384.720098] env[62627]: INFO nova.compute.claims [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: c90dcd2f-c718-41b2-912f-00fa0fe69b68] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1385.307338] env[62627]: DEBUG nova.compute.manager [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Stashing vm_state: active {{(pid=62627) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1385.826710] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e1c090b-50a5-4f1d-92ee-007e35180c78 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.832212] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1385.839731] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f31091e4-b98d-42b0-b3e8-6f726a00f4b6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.872402] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2187e8f3-d735-4522-97d1-4481fc6b01fa {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.879989] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83ec2453-d0a5-497d-a0c4-9d94e15abf4d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.893407] env[62627]: DEBUG nova.compute.provider_tree [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1386.398896] env[62627]: DEBUG nova.scheduler.client.report [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 
tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1386.903474] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.185s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1386.904023] env[62627]: DEBUG nova.compute.manager [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: c90dcd2f-c718-41b2-912f-00fa0fe69b68] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1386.906666] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 1.075s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1387.416020] env[62627]: DEBUG nova.compute.utils [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1387.417215] env[62627]: INFO nova.compute.claims [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1387.428236] env[62627]: DEBUG nova.compute.manager [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: c90dcd2f-c718-41b2-912f-00fa0fe69b68] Allocating IP information in the background. 
{{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1387.428236] env[62627]: DEBUG nova.network.neutron [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: c90dcd2f-c718-41b2-912f-00fa0fe69b68] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1387.490076] env[62627]: DEBUG nova.policy [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd847ab3ec20743d0997defea8008448e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '710e7463994b4ec08bcc6d479cd1138b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 1387.923749] env[62627]: DEBUG nova.compute.manager [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: c90dcd2f-c718-41b2-912f-00fa0fe69b68] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1387.931488] env[62627]: INFO nova.compute.resource_tracker [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Updating resource usage from migration 0b97dcea-30da-4655-bf24-00a8e55b165d [ 1388.024077] env[62627]: DEBUG nova.network.neutron [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: c90dcd2f-c718-41b2-912f-00fa0fe69b68] Successfully created port: 8fa4a67a-bc9d-4eb2-bd67-95ca78f9cf4a {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1388.043098] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7bc4eb8-a068-4c6e-aaea-232c4e5497e8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.052738] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0f61a87-5e48-415c-9d83-48aaf0e79a46 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.084436] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1540836-8bab-456a-a15d-d97284a94fdc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.092741] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fc566f2-5233-48ec-bdcc-456e9905f958 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.107720] env[62627]: DEBUG nova.compute.provider_tree [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Inventory has not changed in 
ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1388.611698] env[62627]: DEBUG nova.scheduler.client.report [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1388.936604] env[62627]: DEBUG nova.compute.manager [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: c90dcd2f-c718-41b2-912f-00fa0fe69b68] Start spawning the instance on the hypervisor. {{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1388.965449] env[62627]: DEBUG nova.virt.hardware [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1388.965696] env[62627]: DEBUG nova.virt.hardware [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1388.965982] env[62627]: DEBUG nova.virt.hardware [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1388.966199] env[62627]: DEBUG nova.virt.hardware [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1388.966352] env[62627]: DEBUG nova.virt.hardware [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1388.966501] env[62627]: DEBUG nova.virt.hardware [None 
req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1388.966713] env[62627]: DEBUG nova.virt.hardware [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1388.966874] env[62627]: DEBUG nova.virt.hardware [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1388.967054] env[62627]: DEBUG nova.virt.hardware [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1388.967223] env[62627]: DEBUG nova.virt.hardware [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1388.967394] env[62627]: DEBUG nova.virt.hardware [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1388.968263] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28fc7112-2b90-4769-b075-c4f1eb7a8c95 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.976692] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1b6d648-f0c4-4c69-9dba-e9ffb1238f11 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.116702] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.209s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1389.116702] env[62627]: INFO nova.compute.manager [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Migrating [ 1389.636031] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquiring lock "refresh_cache-16ed0637-d24d-488d-b9ba-dd5a49a7b2a2" {{(pid=62627) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1389.636031] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquired lock "refresh_cache-16ed0637-d24d-488d-b9ba-dd5a49a7b2a2" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1389.636031] env[62627]: DEBUG nova.network.neutron [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1389.673449] env[62627]: DEBUG nova.compute.manager [req-19bbcb0b-7511-4641-a64d-567aab9dc75c req-fa389164-0a4d-4167-837c-6b910dd8e11b service nova] [instance: c90dcd2f-c718-41b2-912f-00fa0fe69b68] Received event network-vif-plugged-8fa4a67a-bc9d-4eb2-bd67-95ca78f9cf4a {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1389.674017] env[62627]: DEBUG oslo_concurrency.lockutils [req-19bbcb0b-7511-4641-a64d-567aab9dc75c req-fa389164-0a4d-4167-837c-6b910dd8e11b service nova] Acquiring lock "c90dcd2f-c718-41b2-912f-00fa0fe69b68-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1389.674017] env[62627]: DEBUG oslo_concurrency.lockutils [req-19bbcb0b-7511-4641-a64d-567aab9dc75c req-fa389164-0a4d-4167-837c-6b910dd8e11b service nova] Lock "c90dcd2f-c718-41b2-912f-00fa0fe69b68-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1389.674017] env[62627]: DEBUG oslo_concurrency.lockutils [req-19bbcb0b-7511-4641-a64d-567aab9dc75c req-fa389164-0a4d-4167-837c-6b910dd8e11b service nova] Lock "c90dcd2f-c718-41b2-912f-00fa0fe69b68-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1389.674410] env[62627]: DEBUG nova.compute.manager [req-19bbcb0b-7511-4641-a64d-567aab9dc75c req-fa389164-0a4d-4167-837c-6b910dd8e11b service nova] [instance: c90dcd2f-c718-41b2-912f-00fa0fe69b68] No waiting events found dispatching network-vif-plugged-8fa4a67a-bc9d-4eb2-bd67-95ca78f9cf4a {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1389.674410] env[62627]: WARNING nova.compute.manager [req-19bbcb0b-7511-4641-a64d-567aab9dc75c req-fa389164-0a4d-4167-837c-6b910dd8e11b service nova] [instance: c90dcd2f-c718-41b2-912f-00fa0fe69b68] Received unexpected event network-vif-plugged-8fa4a67a-bc9d-4eb2-bd67-95ca78f9cf4a for instance with vm_state building and task_state spawning. 
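Illustrative sketch (assumed, not taken from this log): the "Acquiring lock" / "acquired ... waited" / "released ... held" DEBUG lines above are emitted by oslo.concurrency. Entries pointing at "inner" in lockutils.py come from a function wrapped with the synchronized decorator, while the refresh_cache-<uuid> entries pointing at "lock" in lockutils.py come from the lock() context manager. The function and lock names below are simplified placeholders, not the actual Nova code.

    # Minimal sketch of the two oslo.concurrency locking patterns whose
    # DEBUG output appears above; names are placeholders.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage():
        # Runs with the "compute_resources" lock held; the decorator's
        # wrapper ("inner" in lockutils.py) logs how long the caller
        # waited for the lock and how long it was held.
        pass

    update_usage()

    # Cache-refresh style locks are taken explicitly as a context manager,
    # which logs through "lock" in lockutils.py instead of "inner".
    with lockutils.lock('refresh_cache-c90dcd2f-c718-41b2-912f-00fa0fe69b68'):
        pass  # rebuild the network info cache while the lock is held
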
[ 1389.738879] env[62627]: DEBUG nova.network.neutron [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: c90dcd2f-c718-41b2-912f-00fa0fe69b68] Successfully updated port: 8fa4a67a-bc9d-4eb2-bd67-95ca78f9cf4a {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1390.231656] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Acquiring lock "77b68106-1abe-426b-89ce-3495776b9233" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1390.231955] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Lock "77b68106-1abe-426b-89ce-3495776b9233" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1390.245447] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquiring lock "refresh_cache-c90dcd2f-c718-41b2-912f-00fa0fe69b68" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1390.245625] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquired lock "refresh_cache-c90dcd2f-c718-41b2-912f-00fa0fe69b68" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1390.245819] env[62627]: DEBUG nova.network.neutron [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: c90dcd2f-c718-41b2-912f-00fa0fe69b68] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1390.426361] env[62627]: DEBUG nova.network.neutron [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Updating instance_info_cache with network_info: [{"id": "4cbd817b-c3ef-4a33-8648-4297830602e0", "address": "fa:16:3e:e3:f3:a5", "network": {"id": "da26ed97-a6a7-47f1-b6c0-d1b2f45ad202", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-265963183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.167", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97c11d791dcf4f94a8e8bda7a6f014cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "c6eaa481-1f92-4851-b98e-09ed0daad7cb", "external-id": "nsx-vlan-transportzone-636", "segmentation_id": 636, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4cbd817b-c3", "ovs_interfaceid": "4cbd817b-c3ef-4a33-8648-4297830602e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1390.591700] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1390.591915] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1390.655694] env[62627]: DEBUG oslo_concurrency.lockutils [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquiring lock "8c170084-a7f8-4774-8f9d-7874103b0f4f" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1390.655882] env[62627]: DEBUG oslo_concurrency.lockutils [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lock "8c170084-a7f8-4774-8f9d-7874103b0f4f" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1390.656081] env[62627]: INFO nova.compute.manager [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Shelving [ 1390.734301] env[62627]: DEBUG nova.compute.manager [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1390.782294] env[62627]: DEBUG nova.network.neutron [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: c90dcd2f-c718-41b2-912f-00fa0fe69b68] Instance cache missing network info. 
{{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1390.901831] env[62627]: DEBUG nova.network.neutron [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: c90dcd2f-c718-41b2-912f-00fa0fe69b68] Updating instance_info_cache with network_info: [{"id": "8fa4a67a-bc9d-4eb2-bd67-95ca78f9cf4a", "address": "fa:16:3e:37:6b:7c", "network": {"id": "93a120a3-5be9-4822-960a-7dc2d246a740", "bridge": "br-int", "label": "tempest-ServersTestJSON-1051356861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "710e7463994b4ec08bcc6d479cd1138b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49a94c29-ddd5-4383-9219-1c2c3bb09cc5", "external-id": "nsx-vlan-transportzone-2", "segmentation_id": 2, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8fa4a67a-bc", "ovs_interfaceid": "8fa4a67a-bc9d-4eb2-bd67-95ca78f9cf4a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1390.928904] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Releasing lock "refresh_cache-16ed0637-d24d-488d-b9ba-dd5a49a7b2a2" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1391.096694] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1391.096848] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Starting heal instance info cache {{(pid=62627) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 1391.096965] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Rebuilding the list of instances to heal {{(pid=62627) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10449}} [ 1391.255905] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1391.256327] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s 
{{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1391.257709] env[62627]: INFO nova.compute.claims [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1391.404864] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Releasing lock "refresh_cache-c90dcd2f-c718-41b2-912f-00fa0fe69b68" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1391.405217] env[62627]: DEBUG nova.compute.manager [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: c90dcd2f-c718-41b2-912f-00fa0fe69b68] Instance network_info: |[{"id": "8fa4a67a-bc9d-4eb2-bd67-95ca78f9cf4a", "address": "fa:16:3e:37:6b:7c", "network": {"id": "93a120a3-5be9-4822-960a-7dc2d246a740", "bridge": "br-int", "label": "tempest-ServersTestJSON-1051356861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "710e7463994b4ec08bcc6d479cd1138b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49a94c29-ddd5-4383-9219-1c2c3bb09cc5", "external-id": "nsx-vlan-transportzone-2", "segmentation_id": 2, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8fa4a67a-bc", "ovs_interfaceid": "8fa4a67a-bc9d-4eb2-bd67-95ca78f9cf4a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1391.405709] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: c90dcd2f-c718-41b2-912f-00fa0fe69b68] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:37:6b:7c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '49a94c29-ddd5-4383-9219-1c2c3bb09cc5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8fa4a67a-bc9d-4eb2-bd67-95ca78f9cf4a', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1391.414048] env[62627]: DEBUG oslo.service.loopingcall [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1391.414281] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c90dcd2f-c718-41b2-912f-00fa0fe69b68] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1391.414512] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9623274b-7ebd-4c49-8335-53808a42b7a3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.435326] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1391.435326] env[62627]: value = "task-2195470" [ 1391.435326] env[62627]: _type = "Task" [ 1391.435326] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1391.443325] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195470, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.602725] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: c90dcd2f-c718-41b2-912f-00fa0fe69b68] Skipping network cache update for instance because it is Building. {{(pid=62627) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10458}} [ 1391.633206] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Acquiring lock "refresh_cache-00bb8c04-3c0b-48ff-8127-13d3e15cb86c" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1391.634031] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Acquired lock "refresh_cache-00bb8c04-3c0b-48ff-8127-13d3e15cb86c" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1391.634031] env[62627]: DEBUG nova.network.neutron [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 00bb8c04-3c0b-48ff-8127-13d3e15cb86c] Forcefully refreshing network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1391.634031] env[62627]: DEBUG nova.objects.instance [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lazy-loading 'info_cache' on Instance uuid 00bb8c04-3c0b-48ff-8127-13d3e15cb86c {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1391.665049] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1391.665049] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-547d3aa2-a817-47c2-b86a-c28ba2bf6579 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.671882] env[62627]: DEBUG oslo_vmware.api [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1391.671882] env[62627]: value = "task-2195471" [ 1391.671882] env[62627]: _type = "Task" [ 1391.671882] 
env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1391.682211] env[62627]: DEBUG oslo_vmware.api [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195471, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.700720] env[62627]: DEBUG nova.compute.manager [req-886d1712-e311-4000-9a65-7e6dbb559889 req-ccccbc2c-818f-40e4-8398-ddfdd62344be service nova] [instance: c90dcd2f-c718-41b2-912f-00fa0fe69b68] Received event network-changed-8fa4a67a-bc9d-4eb2-bd67-95ca78f9cf4a {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1391.700720] env[62627]: DEBUG nova.compute.manager [req-886d1712-e311-4000-9a65-7e6dbb559889 req-ccccbc2c-818f-40e4-8398-ddfdd62344be service nova] [instance: c90dcd2f-c718-41b2-912f-00fa0fe69b68] Refreshing instance network info cache due to event network-changed-8fa4a67a-bc9d-4eb2-bd67-95ca78f9cf4a. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1391.701017] env[62627]: DEBUG oslo_concurrency.lockutils [req-886d1712-e311-4000-9a65-7e6dbb559889 req-ccccbc2c-818f-40e4-8398-ddfdd62344be service nova] Acquiring lock "refresh_cache-c90dcd2f-c718-41b2-912f-00fa0fe69b68" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1391.701239] env[62627]: DEBUG oslo_concurrency.lockutils [req-886d1712-e311-4000-9a65-7e6dbb559889 req-ccccbc2c-818f-40e4-8398-ddfdd62344be service nova] Acquired lock "refresh_cache-c90dcd2f-c718-41b2-912f-00fa0fe69b68" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1391.701443] env[62627]: DEBUG nova.network.neutron [req-886d1712-e311-4000-9a65-7e6dbb559889 req-ccccbc2c-818f-40e4-8398-ddfdd62344be service nova] [instance: c90dcd2f-c718-41b2-912f-00fa0fe69b68] Refreshing network info cache for port 8fa4a67a-bc9d-4eb2-bd67-95ca78f9cf4a {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1391.948256] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195470, 'name': CreateVM_Task, 'duration_secs': 0.287436} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1391.948941] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c90dcd2f-c718-41b2-912f-00fa0fe69b68] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1391.949628] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1391.949805] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1391.950141] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1391.950602] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1dd7a4ce-3a41-46e3-aab5-c1092622d3aa {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.954908] env[62627]: DEBUG oslo_vmware.api [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1391.954908] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]529be565-54f7-2083-e218-b5ec3e39a6eb" [ 1391.954908] env[62627]: _type = "Task" [ 1391.954908] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1391.962296] env[62627]: DEBUG oslo_vmware.api [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]529be565-54f7-2083-e218-b5ec3e39a6eb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.182104] env[62627]: DEBUG oslo_vmware.api [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195471, 'name': PowerOffVM_Task, 'duration_secs': 0.186006} completed successfully. 
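The entries above trace the request/poll cycle oslo.vmware uses for long-running vCenter operations: a task-producing call such as Folder.CreateVM_Task or VirtualMachine.PowerOffVM_Task is invoked, and the caller then polls the returned task ("progress is 0%", "completed successfully") until it finishes. A minimal standalone sketch of that pattern follows; the vCenter host, credentials, and VM selection are placeholders for illustration, not values taken from this run.

# Hedged sketch of the invoke-then-poll pattern seen in the log above.
# Host, credentials, and the choice of VM are placeholders, not from this run.
from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

session = vmware_api.VMwareAPISession(
    host='vc.example.test',            # placeholder vCenter endpoint
    server_username='user',            # placeholder credentials
    server_password='secret',
    api_retry_count=10,
    task_poll_interval=0.5,            # drives the periodic "progress is N%" polling
)

# Retrieve some VirtualMachine managed object references (assumes at least one VM exists).
result = session.invoke_api(vim_util, 'get_objects', session.vim,
                            'VirtualMachine', 100)
vm_ref = result.objects[0].obj

# Kick off a vCenter task and block until it completes; wait_for_task raises
# if the task ends in an error state, otherwise it returns the task info.
task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
session.wait_for_task(task)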
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1392.182104] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1392.182919] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ef2107b-41df-4225-ac16-34faa875a251 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.200790] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfd1e6e0-2472-43e9-81c5-c431827d3661 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.352186] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6d058d0-95e3-4a7d-b5ba-f6c9cc3a1b46 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.359752] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01ff2725-7eff-465c-96f7-9c759b0330d5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.389864] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffd2fda5-7999-4d7c-b3a6-69cb72932185 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.396537] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cd4f33e-4713-448c-9fbb-d27074972aac {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.410518] env[62627]: DEBUG nova.compute.provider_tree [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1392.419061] env[62627]: DEBUG nova.network.neutron [req-886d1712-e311-4000-9a65-7e6dbb559889 req-ccccbc2c-818f-40e4-8398-ddfdd62344be service nova] [instance: c90dcd2f-c718-41b2-912f-00fa0fe69b68] Updated VIF entry in instance network info cache for port 8fa4a67a-bc9d-4eb2-bd67-95ca78f9cf4a. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1392.419397] env[62627]: DEBUG nova.network.neutron [req-886d1712-e311-4000-9a65-7e6dbb559889 req-ccccbc2c-818f-40e4-8398-ddfdd62344be service nova] [instance: c90dcd2f-c718-41b2-912f-00fa0fe69b68] Updating instance_info_cache with network_info: [{"id": "8fa4a67a-bc9d-4eb2-bd67-95ca78f9cf4a", "address": "fa:16:3e:37:6b:7c", "network": {"id": "93a120a3-5be9-4822-960a-7dc2d246a740", "bridge": "br-int", "label": "tempest-ServersTestJSON-1051356861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "710e7463994b4ec08bcc6d479cd1138b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49a94c29-ddd5-4383-9219-1c2c3bb09cc5", "external-id": "nsx-vlan-transportzone-2", "segmentation_id": 2, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8fa4a67a-bc", "ovs_interfaceid": "8fa4a67a-bc9d-4eb2-bd67-95ca78f9cf4a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1392.446435] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfe1a274-59e2-4064-8d42-f237d1eeae20 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.464351] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Updating instance '16ed0637-d24d-488d-b9ba-dd5a49a7b2a2' progress to 0 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1392.476037] env[62627]: DEBUG oslo_vmware.api [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]529be565-54f7-2083-e218-b5ec3e39a6eb, 'name': SearchDatastore_Task, 'duration_secs': 0.009651} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1392.476340] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1392.476620] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: c90dcd2f-c718-41b2-912f-00fa0fe69b68] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1392.476798] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1392.476947] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1392.477140] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1392.477391] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2f096f29-cc11-4f7e-ac36-0d337bc93ac0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.485545] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1392.485814] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Folder [datastore2] devstack-image-cache_base created. 
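Immediately above, access to the shared image cache is serialized with named locks: the worker acquires a lock on "[datastore2] devstack-image-cache_base" (and later on the cached VMDK path) before searching the datastore, and releases it once the check or copy is done. A hedged sketch of that oslo.concurrency pattern follows; the lock names and function bodies are illustrative, not nova's actual implementation.

# Hedged sketch of the named-lock pattern used around the image cache above.
# Lock names and function bodies are illustrative only.
from oslo_concurrency import lockutils


def fetch_image_if_missing(cache_lock_name):
    # Entering and leaving this context manager emits "Acquiring lock" /
    # "Acquired lock" / "Releasing lock" DEBUG lines like those in this log.
    with lockutils.lock(cache_lock_name):
        # ... check the datastore for the cached VMDK and copy it in if absent ...
        pass


# The decorator form guards an entire function under one named lock, which is
# how sections such as the "compute_resources" claims above are serialized.
@lockutils.synchronized('compute_resources')
def claim_resources():
    # ... update the resource tracker while holding the lock ...
    pass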
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1392.486454] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a8f56ed7-d1ac-4dd6-b407-02d420135cb6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.492210] env[62627]: DEBUG oslo_vmware.api [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1392.492210] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52378bb2-8738-644a-9ae7-6a28b01af7ab" [ 1392.492210] env[62627]: _type = "Task" [ 1392.492210] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1392.499660] env[62627]: DEBUG oslo_vmware.api [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52378bb2-8738-644a-9ae7-6a28b01af7ab, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.711598] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Creating Snapshot of the VM instance {{(pid=62627) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1392.711939] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-dbaf399a-9d61-4ca0-aefe-b8a04c52d2e2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.720787] env[62627]: DEBUG oslo_vmware.api [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1392.720787] env[62627]: value = "task-2195472" [ 1392.720787] env[62627]: _type = "Task" [ 1392.720787] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1392.729469] env[62627]: DEBUG oslo_vmware.api [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195472, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.913542] env[62627]: DEBUG nova.scheduler.client.report [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1392.921681] env[62627]: DEBUG oslo_concurrency.lockutils [req-886d1712-e311-4000-9a65-7e6dbb559889 req-ccccbc2c-818f-40e4-8398-ddfdd62344be service nova] Releasing lock "refresh_cache-c90dcd2f-c718-41b2-912f-00fa0fe69b68" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1392.973636] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1392.973987] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5af8b0c4-0232-488c-a33c-db641f21d6df {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.981930] env[62627]: DEBUG oslo_vmware.api [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 1392.981930] env[62627]: value = "task-2195473" [ 1392.981930] env[62627]: _type = "Task" [ 1392.981930] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1392.990964] env[62627]: DEBUG oslo_vmware.api [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195473, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.002648] env[62627]: DEBUG oslo_vmware.api [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52378bb2-8738-644a-9ae7-6a28b01af7ab, 'name': SearchDatastore_Task, 'duration_secs': 0.009036} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1393.003455] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-983bf368-3c3a-4022-b6ad-84c681892ea2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.008360] env[62627]: DEBUG oslo_vmware.api [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1393.008360] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5224c68c-62d0-c357-3938-e7eee5dcf063" [ 1393.008360] env[62627]: _type = "Task" [ 1393.008360] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1393.016222] env[62627]: DEBUG oslo_vmware.api [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5224c68c-62d0-c357-3938-e7eee5dcf063, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.232231] env[62627]: DEBUG oslo_vmware.api [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195472, 'name': CreateSnapshot_Task, 'duration_secs': 0.435377} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1393.232532] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Created Snapshot of the VM instance {{(pid=62627) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1393.233289] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8b48fc8-6878-4f31-b514-37787a3f27a4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.376538] env[62627]: DEBUG nova.network.neutron [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 00bb8c04-3c0b-48ff-8127-13d3e15cb86c] Updating instance_info_cache with network_info: [{"id": "d1a62857-50ae-4f10-8206-bfd58d9da6e0", "address": "fa:16:3e:c0:9a:7b", "network": {"id": "93a120a3-5be9-4822-960a-7dc2d246a740", "bridge": "br-int", "label": "tempest-ServersTestJSON-1051356861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "710e7463994b4ec08bcc6d479cd1138b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49a94c29-ddd5-4383-9219-1c2c3bb09cc5", "external-id": "nsx-vlan-transportzone-2", "segmentation_id": 2, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd1a62857-50", "ovs_interfaceid": 
"d1a62857-50ae-4f10-8206-bfd58d9da6e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1393.418118] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.162s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1393.418625] env[62627]: DEBUG nova.compute.manager [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1393.493773] env[62627]: DEBUG oslo_vmware.api [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195473, 'name': PowerOffVM_Task, 'duration_secs': 0.226156} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1393.494052] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1393.494244] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Updating instance '16ed0637-d24d-488d-b9ba-dd5a49a7b2a2' progress to 17 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1393.522026] env[62627]: DEBUG oslo_vmware.api [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5224c68c-62d0-c357-3938-e7eee5dcf063, 'name': SearchDatastore_Task, 'duration_secs': 0.011019} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1393.522026] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1393.522026] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] c90dcd2f-c718-41b2-912f-00fa0fe69b68/c90dcd2f-c718-41b2-912f-00fa0fe69b68.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1393.522026] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9cdba921-d897-4058-9ca8-20a685829de8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.526743] env[62627]: DEBUG oslo_vmware.api [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1393.526743] env[62627]: value = "task-2195474" [ 1393.526743] env[62627]: _type = "Task" [ 1393.526743] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1393.534245] env[62627]: DEBUG oslo_vmware.api [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195474, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.755277] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Creating linked-clone VM from snapshot {{(pid=62627) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1393.755751] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-addbca8b-be9e-4f86-b6a2-e5a2ebc9450e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.768403] env[62627]: DEBUG oslo_vmware.api [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1393.768403] env[62627]: value = "task-2195475" [ 1393.768403] env[62627]: _type = "Task" [ 1393.768403] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1393.781253] env[62627]: DEBUG oslo_vmware.api [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195475, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.879096] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Releasing lock "refresh_cache-00bb8c04-3c0b-48ff-8127-13d3e15cb86c" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1393.879333] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 00bb8c04-3c0b-48ff-8127-13d3e15cb86c] Updated the network info_cache for instance {{(pid=62627) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10516}} [ 1393.879557] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1393.879886] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1393.880127] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1393.880296] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1393.880449] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1393.880655] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1393.880772] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] CONF.reclaim_instance_interval <= 0, skipping... 
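The run of "Running periodic task ComputeManager._*" entries above comes from oslo.service's periodic-task machinery, which drives the compute manager's housekeeping loops (info-cache healing, resize polling, resource auditing, and so on). A minimal, hedged sketch of how such tasks are declared is shown below; the class and task names are illustrative, not nova's.

# Hedged sketch of the oslo.service periodic-task pattern behind the
# "Running periodic task" lines above. Class and method names are illustrative.
from oslo_config import cfg
from oslo_service import periodic_task


class ExampleManager(periodic_task.PeriodicTasks):

    def __init__(self, conf):
        super().__init__(conf)

    @periodic_task.periodic_task(spacing=60)
    def _poll_something(self, context):
        # Runs roughly every 60 seconds once the periodic loop is active.
        pass

    @periodic_task.periodic_task
    def _reclaim_example(self, context):
        # A task may also decide to do nothing, mirroring the
        # "CONF.reclaim_instance_interval <= 0, skipping..." entry above.
        pass


# A service drives these on a timer; each call to run_periodic_tasks() emits
# the oslo_service/periodic_task.py:210 log lines seen in this log.
manager = ExampleManager(cfg.CONF)
manager.run_periodic_tasks(context=None)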
{{(pid=62627) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 1393.880935] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1393.923912] env[62627]: DEBUG nova.compute.utils [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1393.925748] env[62627]: DEBUG nova.compute.manager [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Allocating IP information in the background. {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1393.926013] env[62627]: DEBUG nova.network.neutron [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1393.971995] env[62627]: DEBUG nova.policy [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9744688d4c8c41e6a3091a27f8f63104', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6095198e22f645698a7c566414eba47c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 1394.000978] env[62627]: DEBUG nova.virt.hardware [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1394.001263] env[62627]: DEBUG nova.virt.hardware [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1394.001427] env[62627]: DEBUG nova.virt.hardware [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 
tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1394.001621] env[62627]: DEBUG nova.virt.hardware [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1394.001895] env[62627]: DEBUG nova.virt.hardware [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1394.001895] env[62627]: DEBUG nova.virt.hardware [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1394.002104] env[62627]: DEBUG nova.virt.hardware [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1394.002282] env[62627]: DEBUG nova.virt.hardware [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1394.002439] env[62627]: DEBUG nova.virt.hardware [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1394.002613] env[62627]: DEBUG nova.virt.hardware [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1394.002788] env[62627]: DEBUG nova.virt.hardware [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1394.009964] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1ade8c8b-f93d-47a4-8c95-8af352812724 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.027101] env[62627]: DEBUG oslo_vmware.api [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 1394.027101] env[62627]: value = "task-2195476" [ 1394.027101] env[62627]: _type = "Task" [ 1394.027101] env[62627]: } 
to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1394.043628] env[62627]: DEBUG oslo_vmware.api [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195476, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.048236] env[62627]: DEBUG oslo_vmware.api [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195474, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.458282} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1394.048668] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] c90dcd2f-c718-41b2-912f-00fa0fe69b68/c90dcd2f-c718-41b2-912f-00fa0fe69b68.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1394.049097] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: c90dcd2f-c718-41b2-912f-00fa0fe69b68] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1394.049541] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a329bb49-5ccc-43b2-b3a0-75a806539672 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.057924] env[62627]: DEBUG oslo_vmware.api [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1394.057924] env[62627]: value = "task-2195477" [ 1394.057924] env[62627]: _type = "Task" [ 1394.057924] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1394.067236] env[62627]: DEBUG oslo_vmware.api [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195477, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.253781] env[62627]: DEBUG nova.network.neutron [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Successfully created port: 54f0bc09-a4b0-47de-a805-27b17f146e00 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1394.278556] env[62627]: DEBUG oslo_vmware.api [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195475, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.384966] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1394.385348] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1394.385464] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1394.385620] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62627) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1394.386781] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-000d6f7d-9475-4067-a028-28dd806c71a0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.394470] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeee1177-dec7-4548-8888-b463558673d4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.410259] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-969b426d-f398-42ea-826f-8dd5e0a51962 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.416972] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09b39e3a-b02b-4886-a837-a99d0127e1b8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.446842] env[62627]: DEBUG nova.compute.manager [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Start building block device mappings for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1394.451147] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180161MB free_disk=177GB free_vcpus=48 pci_devices=None {{(pid=62627) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1394.451147] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1394.451147] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1394.538528] env[62627]: DEBUG oslo_vmware.api [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195476, 'name': ReconfigVM_Task, 'duration_secs': 0.361226} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1394.538913] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Updating instance '16ed0637-d24d-488d-b9ba-dd5a49a7b2a2' progress to 33 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1394.567518] env[62627]: DEBUG oslo_vmware.api [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195477, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070459} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1394.567891] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: c90dcd2f-c718-41b2-912f-00fa0fe69b68] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1394.568719] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b6a7bf6-678a-4a24-94f3-90ce065eaa2f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.591880] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: c90dcd2f-c718-41b2-912f-00fa0fe69b68] Reconfiguring VM instance instance-00000075 to attach disk [datastore2] c90dcd2f-c718-41b2-912f-00fa0fe69b68/c90dcd2f-c718-41b2-912f-00fa0fe69b68.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1394.592881] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-72fb2093-184c-4a6b-bab9-629386fc7906 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.612198] env[62627]: DEBUG oslo_vmware.api [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1394.612198] env[62627]: value = "task-2195478" [ 1394.612198] env[62627]: _type = "Task" [ 1394.612198] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1394.619979] env[62627]: DEBUG oslo_vmware.api [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195478, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.781515] env[62627]: DEBUG oslo_vmware.api [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195475, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.046242] env[62627]: DEBUG nova.virt.hardware [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1395.046504] env[62627]: DEBUG nova.virt.hardware [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1395.046667] env[62627]: DEBUG nova.virt.hardware [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1395.046857] env[62627]: DEBUG nova.virt.hardware [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1395.047042] env[62627]: DEBUG nova.virt.hardware [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1395.047199] env[62627]: DEBUG nova.virt.hardware [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1395.047405] env[62627]: DEBUG nova.virt.hardware [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1395.047607] env[62627]: DEBUG nova.virt.hardware [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1395.047839] env[62627]: DEBUG nova.virt.hardware [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Got 1 possible 
topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1395.048032] env[62627]: DEBUG nova.virt.hardware [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1395.048220] env[62627]: DEBUG nova.virt.hardware [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1395.053660] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Reconfiguring VM instance instance-00000070 to detach disk 2000 {{(pid=62627) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1395.053987] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b6f0cff4-8799-4786-b190-2187e705777e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.073177] env[62627]: DEBUG oslo_vmware.api [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 1395.073177] env[62627]: value = "task-2195479" [ 1395.073177] env[62627]: _type = "Task" [ 1395.073177] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1395.081919] env[62627]: DEBUG oslo_vmware.api [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195479, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.123326] env[62627]: DEBUG oslo_vmware.api [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195478, 'name': ReconfigVM_Task, 'duration_secs': 0.419549} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.123326] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: c90dcd2f-c718-41b2-912f-00fa0fe69b68] Reconfigured VM instance instance-00000075 to attach disk [datastore2] c90dcd2f-c718-41b2-912f-00fa0fe69b68/c90dcd2f-c718-41b2-912f-00fa0fe69b68.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1395.123708] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7454c72d-8c09-4400-8296-452d2782b28e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.131152] env[62627]: DEBUG oslo_vmware.api [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1395.131152] env[62627]: value = "task-2195480" [ 1395.131152] env[62627]: _type = "Task" [ 1395.131152] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1395.143820] env[62627]: DEBUG oslo_vmware.api [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195480, 'name': Rename_Task} progress is 6%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.280063] env[62627]: DEBUG oslo_vmware.api [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195475, 'name': CloneVM_Task, 'duration_secs': 1.409761} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.280063] env[62627]: INFO nova.virt.vmwareapi.vmops [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Created linked-clone VM from snapshot [ 1395.280788] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b65d720c-04c5-4402-9cd4-f2991ba83f1b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.287924] env[62627]: DEBUG nova.virt.vmwareapi.images [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Uploading image 96863610-a259-42f5-aff5-eef943337e28 {{(pid=62627) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1395.312852] env[62627]: DEBUG oslo_vmware.rw_handles [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1395.312852] env[62627]: value = "vm-447847" [ 1395.312852] env[62627]: _type = "VirtualMachine" [ 1395.312852] env[62627]: }. 
{{(pid=62627) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1395.313099] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-883cf391-4155-4ba2-b083-cbe1251c5bcc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.320556] env[62627]: DEBUG oslo_vmware.rw_handles [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lease: (returnval){ [ 1395.320556] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]522e4438-45a7-845c-ff57-7500606c8c75" [ 1395.320556] env[62627]: _type = "HttpNfcLease" [ 1395.320556] env[62627]: } obtained for exporting VM: (result){ [ 1395.320556] env[62627]: value = "vm-447847" [ 1395.320556] env[62627]: _type = "VirtualMachine" [ 1395.320556] env[62627]: }. {{(pid=62627) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1395.320828] env[62627]: DEBUG oslo_vmware.api [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the lease: (returnval){ [ 1395.320828] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]522e4438-45a7-845c-ff57-7500606c8c75" [ 1395.320828] env[62627]: _type = "HttpNfcLease" [ 1395.320828] env[62627]: } to be ready. {{(pid=62627) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1395.326550] env[62627]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1395.326550] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]522e4438-45a7-845c-ff57-7500606c8c75" [ 1395.326550] env[62627]: _type = "HttpNfcLease" [ 1395.326550] env[62627]: } is initializing. {{(pid=62627) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1395.461494] env[62627]: DEBUG nova.compute.manager [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Start spawning the instance on the hypervisor. {{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1395.464537] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Applying migration context for instance 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2 as it has an incoming, in-progress migration 0b97dcea-30da-4655-bf24-00a8e55b165d. 
Migration status is migrating {{(pid=62627) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1395.465381] env[62627]: INFO nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Updating resource usage from migration 0b97dcea-30da-4655-bf24-00a8e55b165d [ 1395.484652] env[62627]: DEBUG nova.virt.hardware [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1395.484896] env[62627]: DEBUG nova.virt.hardware [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1395.485065] env[62627]: DEBUG nova.virt.hardware [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1395.485253] env[62627]: DEBUG nova.virt.hardware [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1395.485402] env[62627]: DEBUG nova.virt.hardware [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1395.485552] env[62627]: DEBUG nova.virt.hardware [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1395.485753] env[62627]: DEBUG nova.virt.hardware [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1395.485912] env[62627]: 
DEBUG nova.virt.hardware [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1395.486156] env[62627]: DEBUG nova.virt.hardware [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1395.486402] env[62627]: DEBUG nova.virt.hardware [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1395.486515] env[62627]: DEBUG nova.virt.hardware [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1395.487636] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4396a6f-1898-42c3-a0c3-a84b71f44b9e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.491029] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 00bb8c04-3c0b-48ff-8127-13d3e15cb86c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1395.491029] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 8c170084-a7f8-4774-8f9d-7874103b0f4f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1395.491156] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1395.491233] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance c90dcd2f-c718-41b2-912f-00fa0fe69b68 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1395.491332] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Migration 0b97dcea-30da-4655-bf24-00a8e55b165d is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1395.491446] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1395.491558] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 77b68106-1abe-426b-89ce-3495776b9233 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1395.491733] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=62627) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1395.491868] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1920MB phys_disk=200GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=62627) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1395.499449] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8b19300-9680-4f4f-9747-bb3fd98b6480 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.570578] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2511fa8a-3730-455f-b635-054fa4a8a176 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Acquiring lock "6c4ebb14-c3a3-485c-bbb3-405a8b7f92db" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1395.570850] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2511fa8a-3730-455f-b635-054fa4a8a176 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lock "6c4ebb14-c3a3-485c-bbb3-405a8b7f92db" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1395.585873] env[62627]: DEBUG oslo_vmware.api [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195479, 'name': ReconfigVM_Task, 'duration_secs': 0.166236} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.586134] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Reconfigured VM instance instance-00000070 to detach disk 2000 {{(pid=62627) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1395.587130] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee66bbd5-075d-4285-80fe-a7aa846963a4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.591075] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94adb200-1419-4b84-b00b-68ecab4f4d29 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.614734] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Reconfiguring VM instance instance-00000070 to attach disk [datastore2] 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2/16ed0637-d24d-488d-b9ba-dd5a49a7b2a2.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1395.616834] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a4aa2052-ba60-437f-b2a6-89018fff08db {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.631077] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eb296fe-a9c3-4303-9e0f-20d09c66f9e1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.639414] env[62627]: DEBUG oslo_vmware.api [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 1395.639414] env[62627]: value = "task-2195482" [ 1395.639414] env[62627]: _type = "Task" [ 1395.639414] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1395.669404] env[62627]: DEBUG oslo_vmware.api [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195480, 'name': Rename_Task, 'duration_secs': 0.129574} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.673015] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fd452d7-2f62-47cd-bed0-51472a35ad9c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.675386] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: c90dcd2f-c718-41b2-912f-00fa0fe69b68] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1395.675629] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d654b7f1-b7e7-4884-a91b-552a027ac892 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.682492] env[62627]: DEBUG oslo_vmware.api [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195482, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.685382] env[62627]: DEBUG oslo_vmware.api [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1395.685382] env[62627]: value = "task-2195483" [ 1395.685382] env[62627]: _type = "Task" [ 1395.685382] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1395.686666] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb0919c6-51e3-4641-9ec7-460eec73be57 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.698045] env[62627]: DEBUG oslo_vmware.api [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195483, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.705911] env[62627]: DEBUG nova.compute.provider_tree [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1395.708906] env[62627]: DEBUG nova.compute.manager [req-aa5d7349-76ef-4195-a231-0676b40c82e7 req-a7273852-0fd4-4913-ae63-047d628ea83a service nova] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Received event network-vif-plugged-54f0bc09-a4b0-47de-a805-27b17f146e00 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1395.709146] env[62627]: DEBUG oslo_concurrency.lockutils [req-aa5d7349-76ef-4195-a231-0676b40c82e7 req-a7273852-0fd4-4913-ae63-047d628ea83a service nova] Acquiring lock "77b68106-1abe-426b-89ce-3495776b9233-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1395.709400] env[62627]: DEBUG oslo_concurrency.lockutils [req-aa5d7349-76ef-4195-a231-0676b40c82e7 req-a7273852-0fd4-4913-ae63-047d628ea83a service nova] Lock "77b68106-1abe-426b-89ce-3495776b9233-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1395.709527] env[62627]: DEBUG oslo_concurrency.lockutils [req-aa5d7349-76ef-4195-a231-0676b40c82e7 req-a7273852-0fd4-4913-ae63-047d628ea83a service nova] Lock "77b68106-1abe-426b-89ce-3495776b9233-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1395.709737] env[62627]: DEBUG nova.compute.manager [req-aa5d7349-76ef-4195-a231-0676b40c82e7 req-a7273852-0fd4-4913-ae63-047d628ea83a service nova] [instance: 77b68106-1abe-426b-89ce-3495776b9233] No waiting events found dispatching network-vif-plugged-54f0bc09-a4b0-47de-a805-27b17f146e00 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1395.709953] env[62627]: WARNING nova.compute.manager [req-aa5d7349-76ef-4195-a231-0676b40c82e7 req-a7273852-0fd4-4913-ae63-047d628ea83a service nova] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Received unexpected event network-vif-plugged-54f0bc09-a4b0-47de-a805-27b17f146e00 for instance with vm_state building and task_state spawning. [ 1395.797113] env[62627]: DEBUG nova.network.neutron [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Successfully updated port: 54f0bc09-a4b0-47de-a805-27b17f146e00 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1395.829789] env[62627]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1395.829789] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]522e4438-45a7-845c-ff57-7500606c8c75" [ 1395.829789] env[62627]: _type = "HttpNfcLease" [ 1395.829789] env[62627]: } is ready. 
{{(pid=62627) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1395.830240] env[62627]: DEBUG oslo_vmware.rw_handles [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1395.830240] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]522e4438-45a7-845c-ff57-7500606c8c75" [ 1395.830240] env[62627]: _type = "HttpNfcLease" [ 1395.830240] env[62627]: }. {{(pid=62627) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1395.831062] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e80ad1d7-5750-4ce4-91b0-07117f838abc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.839416] env[62627]: DEBUG oslo_vmware.rw_handles [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52b060bf-1b0f-2b8f-1e17-00cb666239d1/disk-0.vmdk from lease info. {{(pid=62627) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1395.839592] env[62627]: DEBUG oslo_vmware.rw_handles [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52b060bf-1b0f-2b8f-1e17-00cb666239d1/disk-0.vmdk for reading. {{(pid=62627) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1395.938223] env[62627]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-f0f8c420-0a5b-49b1-aa04-a37e72556b43 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.074585] env[62627]: DEBUG nova.compute.utils [None req-2511fa8a-3730-455f-b635-054fa4a8a176 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1396.178299] env[62627]: DEBUG oslo_vmware.api [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195482, 'name': ReconfigVM_Task, 'duration_secs': 0.267012} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1396.178593] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Reconfigured VM instance instance-00000070 to attach disk [datastore2] 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2/16ed0637-d24d-488d-b9ba-dd5a49a7b2a2.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1396.178911] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Updating instance '16ed0637-d24d-488d-b9ba-dd5a49a7b2a2' progress to 50 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1396.194185] env[62627]: DEBUG oslo_vmware.api [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195483, 'name': PowerOnVM_Task, 'duration_secs': 0.456582} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1396.194541] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: c90dcd2f-c718-41b2-912f-00fa0fe69b68] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1396.194857] env[62627]: INFO nova.compute.manager [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: c90dcd2f-c718-41b2-912f-00fa0fe69b68] Took 7.26 seconds to spawn the instance on the hypervisor. 
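The recurring "Waiting for the task ... to complete", "progress is N%" and "completed successfully" records above are produced by oslo.vmware's task-polling helpers (wait_for_task at api.py:397, _poll_task at api.py:434/444). The sketch below is illustrative only, assuming a generic ReconfigVM_Task invocation; vm_ref and reconfig_spec are placeholders supplied by the caller and are not values taken from this environment.

```python
# Illustrative sketch only (not from this log): the caller-side pattern behind
# the wait_for_task / "progress is N%" / "completed successfully" records.
from oslo_vmware import api


def reconfigure_and_wait(session, vm_ref, reconfig_spec):
    """Start a ReconfigVM_Task and block until vCenter reports the outcome."""
    # invoke_api issues the SOAP call; this corresponds to the
    # "Invoking VirtualMachine.ReconfigVM_Task" request_handler lines.
    task_ref = session.invoke_api(session.vim, 'ReconfigVM_Task',
                                  vm_ref, spec=reconfig_spec)
    # wait_for_task polls the task object, emitting the periodic
    # "Task: {...} progress is N%" lines until it succeeds or raises on error.
    return session.wait_for_task(task_ref)
```

A session for such a call would be built with api.VMwareAPISession(host, username, password, api_retry_count, task_poll_interval); task_poll_interval governs how often the progress lines appear. The credentials and host are, of course, deployment-specific.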
[ 1396.195162] env[62627]: DEBUG nova.compute.manager [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: c90dcd2f-c718-41b2-912f-00fa0fe69b68] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1396.195958] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c983b289-2a65-470d-9279-fa6a199251f1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.211418] env[62627]: DEBUG nova.scheduler.client.report [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1396.299759] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Acquiring lock "refresh_cache-77b68106-1abe-426b-89ce-3495776b9233" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1396.300022] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Acquired lock "refresh_cache-77b68106-1abe-426b-89ce-3495776b9233" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1396.300232] env[62627]: DEBUG nova.network.neutron [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1396.577672] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2511fa8a-3730-455f-b635-054fa4a8a176 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lock "6c4ebb14-c3a3-485c-bbb3-405a8b7f92db" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1396.685885] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99248dee-0b07-4e8f-9a3e-598a82077221 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.705779] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95bc5255-b77f-4d2d-8db3-f5de2f10c54c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.729891] env[62627]: INFO nova.compute.manager [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 
tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: c90dcd2f-c718-41b2-912f-00fa0fe69b68] Took 12.03 seconds to build instance. [ 1396.731343] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62627) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1396.732036] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.281s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1396.732036] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Updating instance '16ed0637-d24d-488d-b9ba-dd5a49a7b2a2' progress to 67 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1396.837274] env[62627]: DEBUG nova.network.neutron [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1396.982310] env[62627]: DEBUG nova.network.neutron [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Updating instance_info_cache with network_info: [{"id": "54f0bc09-a4b0-47de-a805-27b17f146e00", "address": "fa:16:3e:50:f4:19", "network": {"id": "eb8841de-7dc6-4afa-ad35-670f3baa1354", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-428698998-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6095198e22f645698a7c566414eba47c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cd38fdec-d092-4a84-ab41-685f6dbb4f29", "external-id": "nsx-vlan-transportzone-622", "segmentation_id": 622, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54f0bc09-a4", "ovs_interfaceid": "54f0bc09-a4b0-47de-a805-27b17f146e00", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1397.235885] env[62627]: DEBUG oslo_concurrency.lockutils [None req-ba20ff63-c4f4-4b2f-b4f3-61b363717bf9 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "c90dcd2f-c718-41b2-912f-00fa0fe69b68" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.539s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1397.275652] env[62627]: DEBUG nova.network.neutron [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Port 4cbd817b-c3ef-4a33-8648-4297830602e0 binding to destination host cpu-1 is already ACTIVE {{(pid=62627) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1397.304757] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9941b578-0158-4da8-84e5-8871492ffbdc tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquiring lock "c90dcd2f-c718-41b2-912f-00fa0fe69b68" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1397.305156] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9941b578-0158-4da8-84e5-8871492ffbdc tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "c90dcd2f-c718-41b2-912f-00fa0fe69b68" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1397.305413] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9941b578-0158-4da8-84e5-8871492ffbdc tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquiring lock "c90dcd2f-c718-41b2-912f-00fa0fe69b68-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1397.305614] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9941b578-0158-4da8-84e5-8871492ffbdc tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "c90dcd2f-c718-41b2-912f-00fa0fe69b68-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1397.306047] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9941b578-0158-4da8-84e5-8871492ffbdc tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "c90dcd2f-c718-41b2-912f-00fa0fe69b68-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1397.308236] env[62627]: INFO nova.compute.manager [None req-9941b578-0158-4da8-84e5-8871492ffbdc tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: c90dcd2f-c718-41b2-912f-00fa0fe69b68] Terminating instance [ 1397.484886] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Releasing lock "refresh_cache-77b68106-1abe-426b-89ce-3495776b9233" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1397.485446] env[62627]: DEBUG 
nova.compute.manager [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Instance network_info: |[{"id": "54f0bc09-a4b0-47de-a805-27b17f146e00", "address": "fa:16:3e:50:f4:19", "network": {"id": "eb8841de-7dc6-4afa-ad35-670f3baa1354", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-428698998-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6095198e22f645698a7c566414eba47c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cd38fdec-d092-4a84-ab41-685f6dbb4f29", "external-id": "nsx-vlan-transportzone-622", "segmentation_id": 622, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54f0bc09-a4", "ovs_interfaceid": "54f0bc09-a4b0-47de-a805-27b17f146e00", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1397.485944] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:50:f4:19', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cd38fdec-d092-4a84-ab41-685f6dbb4f29', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '54f0bc09-a4b0-47de-a805-27b17f146e00', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1397.494612] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Creating folder: Project (6095198e22f645698a7c566414eba47c). Parent ref: group-v447541. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1397.495032] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a555f543-99c5-4dec-b610-b51cc265ae3d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.506951] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Created folder: Project (6095198e22f645698a7c566414eba47c) in parent group-v447541. [ 1397.507291] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Creating folder: Instances. Parent ref: group-v447848. 
{{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1397.507637] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2eb2c051-cd22-47da-ad97-88905915d480 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.518867] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Created folder: Instances in parent group-v447848. [ 1397.519173] env[62627]: DEBUG oslo.service.loopingcall [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1397.519467] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1397.519793] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-915c19de-5584-4ced-9e47-577ebce6d754 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.539834] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1397.539834] env[62627]: value = "task-2195486" [ 1397.539834] env[62627]: _type = "Task" [ 1397.539834] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.548155] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195486, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.656782] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2511fa8a-3730-455f-b635-054fa4a8a176 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Acquiring lock "6c4ebb14-c3a3-485c-bbb3-405a8b7f92db" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1397.657194] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2511fa8a-3730-455f-b635-054fa4a8a176 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lock "6c4ebb14-c3a3-485c-bbb3-405a8b7f92db" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1397.657242] env[62627]: INFO nova.compute.manager [None req-2511fa8a-3730-455f-b635-054fa4a8a176 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Attaching volume 31039c86-298a-44e8-bac6-8c32f195fa88 to /dev/sdb [ 1397.692564] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d8526d8-dea3-4554-b29a-bc1ba21d3c3a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.700626] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-898b5f44-4551-4e60-b173-2f97c75d8ecc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.714394] env[62627]: DEBUG nova.virt.block_device [None req-2511fa8a-3730-455f-b635-054fa4a8a176 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Updating existing volume attachment record: 8ddd7803-714c-425b-b1a5-10056369212e {{(pid=62627) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1397.731606] env[62627]: DEBUG nova.compute.manager [req-ae9c0e5a-5873-4ffe-ab0e-5481fdc81b72 req-62e8ca50-c38a-4fac-95d6-9e4502905857 service nova] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Received event network-changed-54f0bc09-a4b0-47de-a805-27b17f146e00 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1397.731798] env[62627]: DEBUG nova.compute.manager [req-ae9c0e5a-5873-4ffe-ab0e-5481fdc81b72 req-62e8ca50-c38a-4fac-95d6-9e4502905857 service nova] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Refreshing instance network info cache due to event network-changed-54f0bc09-a4b0-47de-a805-27b17f146e00. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1397.732029] env[62627]: DEBUG oslo_concurrency.lockutils [req-ae9c0e5a-5873-4ffe-ab0e-5481fdc81b72 req-62e8ca50-c38a-4fac-95d6-9e4502905857 service nova] Acquiring lock "refresh_cache-77b68106-1abe-426b-89ce-3495776b9233" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1397.732184] env[62627]: DEBUG oslo_concurrency.lockutils [req-ae9c0e5a-5873-4ffe-ab0e-5481fdc81b72 req-62e8ca50-c38a-4fac-95d6-9e4502905857 service nova] Acquired lock "refresh_cache-77b68106-1abe-426b-89ce-3495776b9233" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1397.732346] env[62627]: DEBUG nova.network.neutron [req-ae9c0e5a-5873-4ffe-ab0e-5481fdc81b72 req-62e8ca50-c38a-4fac-95d6-9e4502905857 service nova] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Refreshing network info cache for port 54f0bc09-a4b0-47de-a805-27b17f146e00 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1397.812720] env[62627]: DEBUG nova.compute.manager [None req-9941b578-0158-4da8-84e5-8871492ffbdc tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: c90dcd2f-c718-41b2-912f-00fa0fe69b68] Start destroying the instance on the hypervisor. {{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1397.813034] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-9941b578-0158-4da8-84e5-8871492ffbdc tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: c90dcd2f-c718-41b2-912f-00fa0fe69b68] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1397.813966] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2bca500-2293-4dc9-89af-af93d6ae73ec {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.822388] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-9941b578-0158-4da8-84e5-8871492ffbdc tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: c90dcd2f-c718-41b2-912f-00fa0fe69b68] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1397.822682] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3055897d-3858-45a2-91b0-e7fe0fc00c6a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.829353] env[62627]: DEBUG oslo_vmware.api [None req-9941b578-0158-4da8-84e5-8871492ffbdc tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1397.829353] env[62627]: value = "task-2195487" [ 1397.829353] env[62627]: _type = "Task" [ 1397.829353] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.839721] env[62627]: DEBUG oslo_vmware.api [None req-9941b578-0158-4da8-84e5-8871492ffbdc tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195487, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.057548] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195486, 'name': CreateVM_Task, 'duration_secs': 0.339145} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1398.057850] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1398.058696] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1398.058899] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1398.059296] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1398.059587] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-816f4c9f-3027-4a07-8f00-01b3f5c0dbc2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.065373] env[62627]: DEBUG oslo_vmware.api [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Waiting for the task: (returnval){ [ 1398.065373] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]523250a8-16f6-0da8-e560-56db1dc52414" [ 1398.065373] env[62627]: _type = "Task" [ 1398.065373] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1398.077750] env[62627]: DEBUG oslo_vmware.api [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]523250a8-16f6-0da8-e560-56db1dc52414, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.300339] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquiring lock "16ed0637-d24d-488d-b9ba-dd5a49a7b2a2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1398.300606] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lock "16ed0637-d24d-488d-b9ba-dd5a49a7b2a2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1398.300855] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lock "16ed0637-d24d-488d-b9ba-dd5a49a7b2a2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1398.340900] env[62627]: DEBUG oslo_vmware.api [None req-9941b578-0158-4da8-84e5-8871492ffbdc tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195487, 'name': PowerOffVM_Task, 'duration_secs': 0.25199} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1398.341255] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-9941b578-0158-4da8-84e5-8871492ffbdc tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: c90dcd2f-c718-41b2-912f-00fa0fe69b68] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1398.341445] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-9941b578-0158-4da8-84e5-8871492ffbdc tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: c90dcd2f-c718-41b2-912f-00fa0fe69b68] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1398.341711] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-687bfcb7-3572-4222-8930-4596b72f7635 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.410560] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-9941b578-0158-4da8-84e5-8871492ffbdc tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: c90dcd2f-c718-41b2-912f-00fa0fe69b68] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1398.410906] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-9941b578-0158-4da8-84e5-8871492ffbdc tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: c90dcd2f-c718-41b2-912f-00fa0fe69b68] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 
1398.411215] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-9941b578-0158-4da8-84e5-8871492ffbdc tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Deleting the datastore file [datastore2] c90dcd2f-c718-41b2-912f-00fa0fe69b68 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1398.411596] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8d87c773-29f2-4fc3-b1db-82949740bdbf {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.418955] env[62627]: DEBUG oslo_vmware.api [None req-9941b578-0158-4da8-84e5-8871492ffbdc tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1398.418955] env[62627]: value = "task-2195490" [ 1398.418955] env[62627]: _type = "Task" [ 1398.418955] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1398.427285] env[62627]: DEBUG oslo_vmware.api [None req-9941b578-0158-4da8-84e5-8871492ffbdc tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195490, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.504673] env[62627]: DEBUG nova.network.neutron [req-ae9c0e5a-5873-4ffe-ab0e-5481fdc81b72 req-62e8ca50-c38a-4fac-95d6-9e4502905857 service nova] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Updated VIF entry in instance network info cache for port 54f0bc09-a4b0-47de-a805-27b17f146e00. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1398.505114] env[62627]: DEBUG nova.network.neutron [req-ae9c0e5a-5873-4ffe-ab0e-5481fdc81b72 req-62e8ca50-c38a-4fac-95d6-9e4502905857 service nova] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Updating instance_info_cache with network_info: [{"id": "54f0bc09-a4b0-47de-a805-27b17f146e00", "address": "fa:16:3e:50:f4:19", "network": {"id": "eb8841de-7dc6-4afa-ad35-670f3baa1354", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-428698998-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6095198e22f645698a7c566414eba47c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cd38fdec-d092-4a84-ab41-685f6dbb4f29", "external-id": "nsx-vlan-transportzone-622", "segmentation_id": 622, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54f0bc09-a4", "ovs_interfaceid": "54f0bc09-a4b0-47de-a805-27b17f146e00", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1398.577314] env[62627]: DEBUG oslo_vmware.api [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: 
{'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]523250a8-16f6-0da8-e560-56db1dc52414, 'name': SearchDatastore_Task, 'duration_secs': 0.011834} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1398.577626] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1398.577906] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1398.578201] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1398.578363] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1398.578558] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1398.578853] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-06aba3f0-af53-4ba9-b849-a8cd9ba8f478 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.588015] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1398.588232] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1398.588976] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-43ed74ce-73c5-4c9b-947b-ecadf728e5c2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.594704] env[62627]: DEBUG oslo_vmware.api [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Waiting for the task: (returnval){ [ 1398.594704] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52ae4994-d8ad-551d-d841-72de864db0a5" [ 1398.594704] env[62627]: _type = "Task" [ 1398.594704] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1398.602494] env[62627]: DEBUG oslo_vmware.api [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52ae4994-d8ad-551d-d841-72de864db0a5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.929912] env[62627]: DEBUG oslo_vmware.api [None req-9941b578-0158-4da8-84e5-8871492ffbdc tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195490, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.174801} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1398.930196] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-9941b578-0158-4da8-84e5-8871492ffbdc tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1398.930388] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-9941b578-0158-4da8-84e5-8871492ffbdc tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: c90dcd2f-c718-41b2-912f-00fa0fe69b68] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1398.930583] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-9941b578-0158-4da8-84e5-8871492ffbdc tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: c90dcd2f-c718-41b2-912f-00fa0fe69b68] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1398.930784] env[62627]: INFO nova.compute.manager [None req-9941b578-0158-4da8-84e5-8871492ffbdc tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: c90dcd2f-c718-41b2-912f-00fa0fe69b68] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1398.931062] env[62627]: DEBUG oslo.service.loopingcall [None req-9941b578-0158-4da8-84e5-8871492ffbdc tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1398.931263] env[62627]: DEBUG nova.compute.manager [-] [instance: c90dcd2f-c718-41b2-912f-00fa0fe69b68] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1398.931357] env[62627]: DEBUG nova.network.neutron [-] [instance: c90dcd2f-c718-41b2-912f-00fa0fe69b68] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1399.008110] env[62627]: DEBUG oslo_concurrency.lockutils [req-ae9c0e5a-5873-4ffe-ab0e-5481fdc81b72 req-62e8ca50-c38a-4fac-95d6-9e4502905857 service nova] Releasing lock "refresh_cache-77b68106-1abe-426b-89ce-3495776b9233" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1399.105701] env[62627]: DEBUG oslo_vmware.api [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52ae4994-d8ad-551d-d841-72de864db0a5, 'name': SearchDatastore_Task, 'duration_secs': 0.010338} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1399.106540] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-42e24efe-9dbd-4594-90c0-596148c3b2f6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.112447] env[62627]: DEBUG oslo_vmware.api [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Waiting for the task: (returnval){ [ 1399.112447] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52cbc918-f8b7-a2bf-3b22-43a5cbd4da55" [ 1399.112447] env[62627]: _type = "Task" [ 1399.112447] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.120790] env[62627]: DEBUG oslo_vmware.api [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52cbc918-f8b7-a2bf-3b22-43a5cbd4da55, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.332419] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquiring lock "refresh_cache-16ed0637-d24d-488d-b9ba-dd5a49a7b2a2" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1399.332626] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquired lock "refresh_cache-16ed0637-d24d-488d-b9ba-dd5a49a7b2a2" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1399.332846] env[62627]: DEBUG nova.network.neutron [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1399.625354] env[62627]: DEBUG oslo_vmware.api [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52cbc918-f8b7-a2bf-3b22-43a5cbd4da55, 'name': SearchDatastore_Task, 'duration_secs': 0.01467} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1399.625703] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1399.626026] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 77b68106-1abe-426b-89ce-3495776b9233/77b68106-1abe-426b-89ce-3495776b9233.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1399.626263] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-828e3bb0-0e60-4d19-b343-140de22db2e1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.634060] env[62627]: DEBUG oslo_vmware.api [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Waiting for the task: (returnval){ [ 1399.634060] env[62627]: value = "task-2195491" [ 1399.634060] env[62627]: _type = "Task" [ 1399.634060] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.642633] env[62627]: DEBUG oslo_vmware.api [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195491, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.686580] env[62627]: DEBUG nova.network.neutron [-] [instance: c90dcd2f-c718-41b2-912f-00fa0fe69b68] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1399.758465] env[62627]: DEBUG nova.compute.manager [req-ab0a7955-9c55-4818-8032-30a55819466b req-be7fb942-1a45-437a-8b31-2123a92d398b service nova] [instance: c90dcd2f-c718-41b2-912f-00fa0fe69b68] Received event network-vif-deleted-8fa4a67a-bc9d-4eb2-bd67-95ca78f9cf4a {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1400.149476] env[62627]: DEBUG oslo_vmware.api [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195491, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.188876] env[62627]: INFO nova.compute.manager [-] [instance: c90dcd2f-c718-41b2-912f-00fa0fe69b68] Took 1.26 seconds to deallocate network for instance. [ 1400.361734] env[62627]: DEBUG nova.network.neutron [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Updating instance_info_cache with network_info: [{"id": "4cbd817b-c3ef-4a33-8648-4297830602e0", "address": "fa:16:3e:e3:f3:a5", "network": {"id": "da26ed97-a6a7-47f1-b6c0-d1b2f45ad202", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-265963183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.167", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97c11d791dcf4f94a8e8bda7a6f014cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6eaa481-1f92-4851-b98e-09ed0daad7cb", "external-id": "nsx-vlan-transportzone-636", "segmentation_id": 636, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4cbd817b-c3", "ovs_interfaceid": "4cbd817b-c3ef-4a33-8648-4297830602e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1400.645486] env[62627]: DEBUG oslo_vmware.api [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195491, 'name': CopyVirtualDisk_Task, 'duration_secs': 
0.517685} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.645775] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 77b68106-1abe-426b-89ce-3495776b9233/77b68106-1abe-426b-89ce-3495776b9233.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1400.646086] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1400.646284] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1d449ef1-16a7-4944-b557-2b94bc5e5b75 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.653413] env[62627]: DEBUG oslo_vmware.api [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Waiting for the task: (returnval){ [ 1400.653413] env[62627]: value = "task-2195493" [ 1400.653413] env[62627]: _type = "Task" [ 1400.653413] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.661492] env[62627]: DEBUG oslo_vmware.api [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195493, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.698804] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9941b578-0158-4da8-84e5-8871492ffbdc tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1400.699142] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9941b578-0158-4da8-84e5-8871492ffbdc tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1400.699377] env[62627]: DEBUG nova.objects.instance [None req-9941b578-0158-4da8-84e5-8871492ffbdc tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lazy-loading 'resources' on Instance uuid c90dcd2f-c718-41b2-912f-00fa0fe69b68 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1400.865092] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Releasing lock "refresh_cache-16ed0637-d24d-488d-b9ba-dd5a49a7b2a2" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1401.163756] env[62627]: DEBUG oslo_vmware.api [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195493, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076852} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1401.164175] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1401.164828] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7331b051-3412-454e-8644-abc886f7924d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.189687] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Reconfiguring VM instance instance-00000076 to attach disk [datastore2] 77b68106-1abe-426b-89ce-3495776b9233/77b68106-1abe-426b-89ce-3495776b9233.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1401.190072] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dedcd917-1100-43a9-a4a3-8eac97c7a160 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.213331] env[62627]: DEBUG oslo_vmware.api [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Waiting for the task: (returnval){ [ 1401.213331] env[62627]: value = "task-2195494" [ 1401.213331] env[62627]: _type = "Task" [ 1401.213331] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1401.223630] env[62627]: DEBUG oslo_vmware.api [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195494, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.315053] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96a524d2-1820-44f5-9253-8d9037f278a5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.322887] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be2050d5-7bce-4096-8d23-6352e6d34a8e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.353115] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9810e8c-7216-4e1a-b96b-10fb7a1c334d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.360897] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68d333e2-2093-4711-adb1-7f754fbf7ba4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.377331] env[62627]: DEBUG nova.compute.provider_tree [None req-9941b578-0158-4da8-84e5-8871492ffbdc tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1401.391936] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c52b74c-5e3c-4add-9027-a3a1bcab0341 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.411032] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2be4f7ee-9558-4f3d-8217-ea60593b0c7c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.418150] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Updating instance '16ed0637-d24d-488d-b9ba-dd5a49a7b2a2' progress to 83 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1401.723925] env[62627]: DEBUG oslo_vmware.api [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195494, 'name': ReconfigVM_Task, 'duration_secs': 0.306716} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1401.724258] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Reconfigured VM instance instance-00000076 to attach disk [datastore2] 77b68106-1abe-426b-89ce-3495776b9233/77b68106-1abe-426b-89ce-3495776b9233.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1401.724920] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9692f172-aed4-4805-8ece-cda628614984 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.732579] env[62627]: DEBUG oslo_vmware.api [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Waiting for the task: (returnval){ [ 1401.732579] env[62627]: value = "task-2195495" [ 1401.732579] env[62627]: _type = "Task" [ 1401.732579] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1401.742277] env[62627]: DEBUG oslo_vmware.api [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195495, 'name': Rename_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.882295] env[62627]: DEBUG nova.scheduler.client.report [None req-9941b578-0158-4da8-84e5-8871492ffbdc tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1401.924608] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1401.924906] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f75bb3f0-772b-4d1a-af7a-849f99b746b9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.931570] env[62627]: DEBUG oslo_vmware.api [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 1401.931570] env[62627]: value = "task-2195496" [ 1401.931570] env[62627]: _type = "Task" [ 1401.931570] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1401.939797] env[62627]: DEBUG oslo_vmware.api [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195496, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.243628] env[62627]: DEBUG oslo_vmware.api [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195495, 'name': Rename_Task, 'duration_secs': 0.142258} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1402.244032] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1402.244187] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1658e374-e73c-4193-b222-f3f25d803116 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.250798] env[62627]: DEBUG oslo_vmware.api [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Waiting for the task: (returnval){ [ 1402.250798] env[62627]: value = "task-2195497" [ 1402.250798] env[62627]: _type = "Task" [ 1402.250798] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1402.258632] env[62627]: DEBUG oslo_vmware.api [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195497, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.259797] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-2511fa8a-3730-455f-b635-054fa4a8a176 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Volume attach. 
Driver type: vmdk {{(pid=62627) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1402.260052] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-2511fa8a-3730-455f-b635-054fa4a8a176 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447851', 'volume_id': '31039c86-298a-44e8-bac6-8c32f195fa88', 'name': 'volume-31039c86-298a-44e8-bac6-8c32f195fa88', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '6c4ebb14-c3a3-485c-bbb3-405a8b7f92db', 'attached_at': '', 'detached_at': '', 'volume_id': '31039c86-298a-44e8-bac6-8c32f195fa88', 'serial': '31039c86-298a-44e8-bac6-8c32f195fa88'} {{(pid=62627) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1402.260844] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e709fd6-3783-42b5-8b09-7ddc055fa1d2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.276200] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb97f376-3925-4fcb-bfd1-8a44c6f20497 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.300907] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-2511fa8a-3730-455f-b635-054fa4a8a176 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Reconfiguring VM instance instance-00000073 to attach disk [datastore2] volume-31039c86-298a-44e8-bac6-8c32f195fa88/volume-31039c86-298a-44e8-bac6-8c32f195fa88.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1402.301262] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fce9fbcf-9c6e-47e7-ab6e-494ca199e6f0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.319815] env[62627]: DEBUG oslo_vmware.api [None req-2511fa8a-3730-455f-b635-054fa4a8a176 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Waiting for the task: (returnval){ [ 1402.319815] env[62627]: value = "task-2195498" [ 1402.319815] env[62627]: _type = "Task" [ 1402.319815] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1402.328102] env[62627]: DEBUG oslo_vmware.api [None req-2511fa8a-3730-455f-b635-054fa4a8a176 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195498, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.388079] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9941b578-0158-4da8-84e5-8871492ffbdc tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.688s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1402.408851] env[62627]: INFO nova.scheduler.client.report [None req-9941b578-0158-4da8-84e5-8871492ffbdc tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Deleted allocations for instance c90dcd2f-c718-41b2-912f-00fa0fe69b68 [ 1402.441338] env[62627]: DEBUG oslo_vmware.api [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195496, 'name': PowerOnVM_Task, 'duration_secs': 0.407119} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1402.441613] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1402.441806] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e4b6e42b-a14a-4eb5-bf0e-269ddebc7f76 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Updating instance '16ed0637-d24d-488d-b9ba-dd5a49a7b2a2' progress to 100 {{(pid=62627) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1402.761018] env[62627]: DEBUG oslo_vmware.api [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195497, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.829977] env[62627]: DEBUG oslo_vmware.api [None req-2511fa8a-3730-455f-b635-054fa4a8a176 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195498, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.916461] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9941b578-0158-4da8-84e5-8871492ffbdc tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "c90dcd2f-c718-41b2-912f-00fa0fe69b68" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.611s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1403.273447] env[62627]: DEBUG oslo_vmware.api [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195497, 'name': PowerOnVM_Task, 'duration_secs': 0.649626} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1403.281375] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1403.281375] env[62627]: INFO nova.compute.manager [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Took 7.81 seconds to spawn the instance on the hypervisor. [ 1403.281375] env[62627]: DEBUG nova.compute.manager [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1403.281375] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1813c407-d5f1-4a82-97e7-2ea9d6c2f5b6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.330462] env[62627]: DEBUG oslo_vmware.api [None req-2511fa8a-3730-455f-b635-054fa4a8a176 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195498, 'name': ReconfigVM_Task, 'duration_secs': 0.510455} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1403.331481] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-2511fa8a-3730-455f-b635-054fa4a8a176 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Reconfigured VM instance instance-00000073 to attach disk [datastore2] volume-31039c86-298a-44e8-bac6-8c32f195fa88/volume-31039c86-298a-44e8-bac6-8c32f195fa88.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1403.336424] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0bde22f9-18a5-41c7-84d4-f4fcb67c544b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.352833] env[62627]: DEBUG oslo_vmware.api [None req-2511fa8a-3730-455f-b635-054fa4a8a176 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Waiting for the task: (returnval){ [ 1403.352833] env[62627]: value = "task-2195499" [ 1403.352833] env[62627]: _type = "Task" [ 1403.352833] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1403.362776] env[62627]: DEBUG oslo_vmware.api [None req-2511fa8a-3730-455f-b635-054fa4a8a176 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195499, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.429954] env[62627]: DEBUG oslo_vmware.rw_handles [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52b060bf-1b0f-2b8f-1e17-00cb666239d1/disk-0.vmdk. {{(pid=62627) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1403.430857] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dd21ab3-f0c6-4de1-b9dd-9f684a2f87fa {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.436779] env[62627]: DEBUG oslo_vmware.rw_handles [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52b060bf-1b0f-2b8f-1e17-00cb666239d1/disk-0.vmdk is in state: ready. {{(pid=62627) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1403.436946] env[62627]: ERROR oslo_vmware.rw_handles [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52b060bf-1b0f-2b8f-1e17-00cb666239d1/disk-0.vmdk due to incomplete transfer. [ 1403.437191] env[62627]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-e3b5953f-13a5-4dbf-aa8f-8e2940bfbf0d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.445049] env[62627]: DEBUG oslo_vmware.rw_handles [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52b060bf-1b0f-2b8f-1e17-00cb666239d1/disk-0.vmdk. 
{{(pid=62627) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1403.445254] env[62627]: DEBUG nova.virt.vmwareapi.images [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Uploaded image 96863610-a259-42f5-aff5-eef943337e28 to the Glance image server {{(pid=62627) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1403.447520] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Destroying the VM {{(pid=62627) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1403.447781] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-5d0b8bef-543e-4896-8a0b-ce25bc25ba74 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.455580] env[62627]: DEBUG oslo_vmware.api [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1403.455580] env[62627]: value = "task-2195500" [ 1403.455580] env[62627]: _type = "Task" [ 1403.455580] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1403.464062] env[62627]: DEBUG oslo_vmware.api [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195500, 'name': Destroy_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.793919] env[62627]: INFO nova.compute.manager [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Took 12.56 seconds to build instance. [ 1403.862846] env[62627]: DEBUG oslo_vmware.api [None req-2511fa8a-3730-455f-b635-054fa4a8a176 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195499, 'name': ReconfigVM_Task, 'duration_secs': 0.133156} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1403.863176] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-2511fa8a-3730-455f-b635-054fa4a8a176 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447851', 'volume_id': '31039c86-298a-44e8-bac6-8c32f195fa88', 'name': 'volume-31039c86-298a-44e8-bac6-8c32f195fa88', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '6c4ebb14-c3a3-485c-bbb3-405a8b7f92db', 'attached_at': '', 'detached_at': '', 'volume_id': '31039c86-298a-44e8-bac6-8c32f195fa88', 'serial': '31039c86-298a-44e8-bac6-8c32f195fa88'} {{(pid=62627) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1403.972683] env[62627]: DEBUG oslo_vmware.api [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195500, 'name': Destroy_Task, 'duration_secs': 0.487704} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1403.972819] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Destroyed the VM [ 1403.973066] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Deleting Snapshot of the VM instance {{(pid=62627) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1403.973319] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-dfa15465-2e8b-482a-8b57-936563a10b3d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.979203] env[62627]: DEBUG oslo_vmware.api [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1403.979203] env[62627]: value = "task-2195501" [ 1403.979203] env[62627]: _type = "Task" [ 1403.979203] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1403.988078] env[62627]: DEBUG oslo_vmware.api [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195501, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.296129] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7ad22250-4f0c-4839-a66e-c64bb6c99f41 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Lock "77b68106-1abe-426b-89ce-3495776b9233" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.064s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1404.343559] env[62627]: DEBUG oslo_concurrency.lockutils [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquiring lock "6a53e7a7-ec4b-4a97-98f7-8d686df3178c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1404.343873] env[62627]: DEBUG oslo_concurrency.lockutils [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "6a53e7a7-ec4b-4a97-98f7-8d686df3178c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1404.490154] env[62627]: DEBUG oslo_vmware.api [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195501, 'name': RemoveSnapshot_Task, 'duration_secs': 0.325185} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1404.490427] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Deleted Snapshot of the VM instance {{(pid=62627) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1404.490702] env[62627]: DEBUG nova.compute.manager [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1404.491510] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f88d7f3d-76e9-46ae-a81a-38562c4cf06d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.852095] env[62627]: DEBUG nova.compute.manager [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 6a53e7a7-ec4b-4a97-98f7-8d686df3178c] Starting instance... 
{{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1404.898740] env[62627]: DEBUG nova.compute.manager [req-10855636-6b75-405f-a560-f4db793fd04f req-0b6e7c2e-5ff4-4790-91ba-01034059f4ac service nova] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Received event network-changed-54f0bc09-a4b0-47de-a805-27b17f146e00 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1404.899067] env[62627]: DEBUG nova.compute.manager [req-10855636-6b75-405f-a560-f4db793fd04f req-0b6e7c2e-5ff4-4790-91ba-01034059f4ac service nova] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Refreshing instance network info cache due to event network-changed-54f0bc09-a4b0-47de-a805-27b17f146e00. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1404.899179] env[62627]: DEBUG oslo_concurrency.lockutils [req-10855636-6b75-405f-a560-f4db793fd04f req-0b6e7c2e-5ff4-4790-91ba-01034059f4ac service nova] Acquiring lock "refresh_cache-77b68106-1abe-426b-89ce-3495776b9233" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1404.899338] env[62627]: DEBUG oslo_concurrency.lockutils [req-10855636-6b75-405f-a560-f4db793fd04f req-0b6e7c2e-5ff4-4790-91ba-01034059f4ac service nova] Acquired lock "refresh_cache-77b68106-1abe-426b-89ce-3495776b9233" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1404.899503] env[62627]: DEBUG nova.network.neutron [req-10855636-6b75-405f-a560-f4db793fd04f req-0b6e7c2e-5ff4-4790-91ba-01034059f4ac service nova] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Refreshing network info cache for port 54f0bc09-a4b0-47de-a805-27b17f146e00 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1404.901928] env[62627]: DEBUG nova.objects.instance [None req-2511fa8a-3730-455f-b635-054fa4a8a176 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lazy-loading 'flavor' on Instance uuid 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1405.004326] env[62627]: INFO nova.compute.manager [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Shelve offloading [ 1405.131840] env[62627]: DEBUG oslo_concurrency.lockutils [None req-09ac693c-427f-4d09-9af7-1de94a11b57c tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Acquiring lock "6c4ebb14-c3a3-485c-bbb3-405a8b7f92db" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1405.371612] env[62627]: DEBUG oslo_concurrency.lockutils [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1405.371863] env[62627]: DEBUG oslo_concurrency.lockutils [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1405.373359] env[62627]: INFO nova.compute.claims [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 6a53e7a7-ec4b-4a97-98f7-8d686df3178c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1405.408395] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2511fa8a-3730-455f-b635-054fa4a8a176 tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lock "6c4ebb14-c3a3-485c-bbb3-405a8b7f92db" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.751s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1405.409704] env[62627]: DEBUG oslo_concurrency.lockutils [None req-09ac693c-427f-4d09-9af7-1de94a11b57c tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lock "6c4ebb14-c3a3-485c-bbb3-405a8b7f92db" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.278s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1405.442339] env[62627]: DEBUG nova.network.neutron [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Port 4cbd817b-c3ef-4a33-8648-4297830602e0 binding to destination host cpu-1 is already ACTIVE {{(pid=62627) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1405.442604] env[62627]: DEBUG oslo_concurrency.lockutils [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquiring lock "refresh_cache-16ed0637-d24d-488d-b9ba-dd5a49a7b2a2" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1405.443466] env[62627]: DEBUG oslo_concurrency.lockutils [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquired lock "refresh_cache-16ed0637-d24d-488d-b9ba-dd5a49a7b2a2" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1405.443466] env[62627]: DEBUG nova.network.neutron [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1405.510445] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1405.513076] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ebeefb25-e936-4529-98af-12ed315ded68 {{(pid=62627) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.520438] env[62627]: DEBUG oslo_vmware.api [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1405.520438] env[62627]: value = "task-2195502" [ 1405.520438] env[62627]: _type = "Task" [ 1405.520438] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1405.529366] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] VM already powered off {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1405.529482] env[62627]: DEBUG nova.compute.manager [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1405.530253] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92bf0630-425b-43d1-a5b4-3eb268a7dcde {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.537228] env[62627]: DEBUG oslo_concurrency.lockutils [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquiring lock "refresh_cache-8c170084-a7f8-4774-8f9d-7874103b0f4f" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1405.537408] env[62627]: DEBUG oslo_concurrency.lockutils [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquired lock "refresh_cache-8c170084-a7f8-4774-8f9d-7874103b0f4f" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1405.537593] env[62627]: DEBUG nova.network.neutron [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1405.625967] env[62627]: DEBUG nova.network.neutron [req-10855636-6b75-405f-a560-f4db793fd04f req-0b6e7c2e-5ff4-4790-91ba-01034059f4ac service nova] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Updated VIF entry in instance network info cache for port 54f0bc09-a4b0-47de-a805-27b17f146e00. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1405.626413] env[62627]: DEBUG nova.network.neutron [req-10855636-6b75-405f-a560-f4db793fd04f req-0b6e7c2e-5ff4-4790-91ba-01034059f4ac service nova] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Updating instance_info_cache with network_info: [{"id": "54f0bc09-a4b0-47de-a805-27b17f146e00", "address": "fa:16:3e:50:f4:19", "network": {"id": "eb8841de-7dc6-4afa-ad35-670f3baa1354", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-428698998-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.161", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6095198e22f645698a7c566414eba47c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cd38fdec-d092-4a84-ab41-685f6dbb4f29", "external-id": "nsx-vlan-transportzone-622", "segmentation_id": 622, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54f0bc09-a4", "ovs_interfaceid": "54f0bc09-a4b0-47de-a805-27b17f146e00", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1405.914866] env[62627]: INFO nova.compute.manager [None req-09ac693c-427f-4d09-9af7-1de94a11b57c tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Detaching volume 31039c86-298a-44e8-bac6-8c32f195fa88 [ 1405.949806] env[62627]: INFO nova.virt.block_device [None req-09ac693c-427f-4d09-9af7-1de94a11b57c tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Attempting to driver detach volume 31039c86-298a-44e8-bac6-8c32f195fa88 from mountpoint /dev/sdb [ 1405.950090] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-09ac693c-427f-4d09-9af7-1de94a11b57c tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Volume detach. 
Driver type: vmdk {{(pid=62627) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1405.950288] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-09ac693c-427f-4d09-9af7-1de94a11b57c tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447851', 'volume_id': '31039c86-298a-44e8-bac6-8c32f195fa88', 'name': 'volume-31039c86-298a-44e8-bac6-8c32f195fa88', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '6c4ebb14-c3a3-485c-bbb3-405a8b7f92db', 'attached_at': '', 'detached_at': '', 'volume_id': '31039c86-298a-44e8-bac6-8c32f195fa88', 'serial': '31039c86-298a-44e8-bac6-8c32f195fa88'} {{(pid=62627) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1405.951310] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4f281f8-d2a9-461a-b2e1-d687d749cda7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.972324] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-397925c3-4b82-40cd-bce0-379be41b5991 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.979300] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0172d2ef-3a58-48ed-9e17-ced325c51093 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.001467] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12f14f50-7e09-4b49-a613-e4595f62320b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.015420] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-09ac693c-427f-4d09-9af7-1de94a11b57c tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] The volume has not been displaced from its original location: [datastore2] volume-31039c86-298a-44e8-bac6-8c32f195fa88/volume-31039c86-298a-44e8-bac6-8c32f195fa88.vmdk. No consolidation needed. 
{{(pid=62627) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1406.020613] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-09ac693c-427f-4d09-9af7-1de94a11b57c tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Reconfiguring VM instance instance-00000073 to detach disk 2001 {{(pid=62627) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1406.021305] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-504c605e-3e2d-435a-8b1d-134335072aaf {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.041846] env[62627]: DEBUG oslo_vmware.api [None req-09ac693c-427f-4d09-9af7-1de94a11b57c tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Waiting for the task: (returnval){ [ 1406.041846] env[62627]: value = "task-2195503" [ 1406.041846] env[62627]: _type = "Task" [ 1406.041846] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1406.050579] env[62627]: DEBUG oslo_vmware.api [None req-09ac693c-427f-4d09-9af7-1de94a11b57c tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195503, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1406.129594] env[62627]: DEBUG oslo_concurrency.lockutils [req-10855636-6b75-405f-a560-f4db793fd04f req-0b6e7c2e-5ff4-4790-91ba-01034059f4ac service nova] Releasing lock "refresh_cache-77b68106-1abe-426b-89ce-3495776b9233" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1406.213672] env[62627]: DEBUG nova.network.neutron [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Updating instance_info_cache with network_info: [{"id": "4cbd817b-c3ef-4a33-8648-4297830602e0", "address": "fa:16:3e:e3:f3:a5", "network": {"id": "da26ed97-a6a7-47f1-b6c0-d1b2f45ad202", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-265963183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.167", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97c11d791dcf4f94a8e8bda7a6f014cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6eaa481-1f92-4851-b98e-09ed0daad7cb", "external-id": "nsx-vlan-transportzone-636", "segmentation_id": 636, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4cbd817b-c3", "ovs_interfaceid": "4cbd817b-c3ef-4a33-8648-4297830602e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1406.262483] env[62627]: DEBUG nova.network.neutron [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Updating instance_info_cache with network_info: [{"id": "71c1451f-de1d-4700-8326-282bc39bb6f7", "address": "fa:16:3e:fb:be:f1", "network": {"id": "e5d8177b-3afb-46cb-88d9-d4777ffa4a39", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1184692155-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.160", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f3a36c557145e38c528a628b1f3b67", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a44713-0af1-486e-bc0d-00e03a769fa4", "external-id": "nsx-vlan-transportzone-420", "segmentation_id": 420, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71c1451f-de", "ovs_interfaceid": "71c1451f-de1d-4700-8326-282bc39bb6f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1406.479896] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e5c9fed-e12b-47c2-abe8-9e536db453af {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.487368] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dbae323-baa4-4d31-a2e8-fdc1d00388be {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.517950] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2d64d41-f7f6-4f92-92b1-ce75326b0e42 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.524725] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a9f1c8b-5ac3-47ba-bc46-58e8b32a3944 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.537656] env[62627]: DEBUG nova.compute.provider_tree [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1406.550184] env[62627]: DEBUG oslo_vmware.api [None req-09ac693c-427f-4d09-9af7-1de94a11b57c tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195503, 'name': ReconfigVM_Task, 'duration_secs': 0.222969} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1406.550435] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-09ac693c-427f-4d09-9af7-1de94a11b57c tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Reconfigured VM instance instance-00000073 to detach disk 2001 {{(pid=62627) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1406.555142] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bf13a2da-4194-4852-9293-d2eb2be74793 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.569124] env[62627]: DEBUG oslo_vmware.api [None req-09ac693c-427f-4d09-9af7-1de94a11b57c tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Waiting for the task: (returnval){ [ 1406.569124] env[62627]: value = "task-2195504" [ 1406.569124] env[62627]: _type = "Task" [ 1406.569124] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1406.576236] env[62627]: DEBUG oslo_vmware.api [None req-09ac693c-427f-4d09-9af7-1de94a11b57c tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195504, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1406.716349] env[62627]: DEBUG oslo_concurrency.lockutils [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Releasing lock "refresh_cache-16ed0637-d24d-488d-b9ba-dd5a49a7b2a2" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1406.765436] env[62627]: DEBUG oslo_concurrency.lockutils [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Releasing lock "refresh_cache-8c170084-a7f8-4774-8f9d-7874103b0f4f" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1406.974586] env[62627]: DEBUG nova.compute.manager [req-895613f9-318a-4802-9364-1858c8af1eeb req-385648af-c874-40c7-9138-3d83b3d87e2f service nova] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Received event network-vif-unplugged-71c1451f-de1d-4700-8326-282bc39bb6f7 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1406.974809] env[62627]: DEBUG oslo_concurrency.lockutils [req-895613f9-318a-4802-9364-1858c8af1eeb req-385648af-c874-40c7-9138-3d83b3d87e2f service nova] Acquiring lock "8c170084-a7f8-4774-8f9d-7874103b0f4f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1406.975033] env[62627]: DEBUG oslo_concurrency.lockutils [req-895613f9-318a-4802-9364-1858c8af1eeb req-385648af-c874-40c7-9138-3d83b3d87e2f service nova] Lock "8c170084-a7f8-4774-8f9d-7874103b0f4f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
1406.975209] env[62627]: DEBUG oslo_concurrency.lockutils [req-895613f9-318a-4802-9364-1858c8af1eeb req-385648af-c874-40c7-9138-3d83b3d87e2f service nova] Lock "8c170084-a7f8-4774-8f9d-7874103b0f4f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1406.975377] env[62627]: DEBUG nova.compute.manager [req-895613f9-318a-4802-9364-1858c8af1eeb req-385648af-c874-40c7-9138-3d83b3d87e2f service nova] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] No waiting events found dispatching network-vif-unplugged-71c1451f-de1d-4700-8326-282bc39bb6f7 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1406.975544] env[62627]: WARNING nova.compute.manager [req-895613f9-318a-4802-9364-1858c8af1eeb req-385648af-c874-40c7-9138-3d83b3d87e2f service nova] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Received unexpected event network-vif-unplugged-71c1451f-de1d-4700-8326-282bc39bb6f7 for instance with vm_state shelved and task_state shelving_offloading. [ 1407.040822] env[62627]: DEBUG nova.scheduler.client.report [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1407.078618] env[62627]: DEBUG oslo_vmware.api [None req-09ac693c-427f-4d09-9af7-1de94a11b57c tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195504, 'name': ReconfigVM_Task, 'duration_secs': 0.127453} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1407.078980] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-09ac693c-427f-4d09-9af7-1de94a11b57c tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447851', 'volume_id': '31039c86-298a-44e8-bac6-8c32f195fa88', 'name': 'volume-31039c86-298a-44e8-bac6-8c32f195fa88', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '6c4ebb14-c3a3-485c-bbb3-405a8b7f92db', 'attached_at': '', 'detached_at': '', 'volume_id': '31039c86-298a-44e8-bac6-8c32f195fa88', 'serial': '31039c86-298a-44e8-bac6-8c32f195fa88'} {{(pid=62627) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1407.103203] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1407.104063] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f8deb6b-e4f5-4fec-a369-b5d0ce9888c2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.111540] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1407.111779] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2c04c0e7-450e-48f3-8809-ca154e5e222e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.175757] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1407.176099] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1407.176358] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Deleting the datastore file [datastore2] 8c170084-a7f8-4774-8f9d-7874103b0f4f {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1407.176677] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-97c7d2fe-f105-4669-80c3-239b032bb1f7 {{(pid=62627) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.183693] env[62627]: DEBUG oslo_vmware.api [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1407.183693] env[62627]: value = "task-2195506" [ 1407.183693] env[62627]: _type = "Task" [ 1407.183693] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1407.193265] env[62627]: DEBUG oslo_vmware.api [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195506, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.219646] env[62627]: DEBUG nova.compute.manager [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=62627) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 1407.219886] env[62627]: DEBUG oslo_concurrency.lockutils [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1407.545229] env[62627]: DEBUG oslo_concurrency.lockutils [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.173s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1407.546031] env[62627]: DEBUG nova.compute.manager [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 6a53e7a7-ec4b-4a97-98f7-8d686df3178c] Start building networks asynchronously for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1407.548865] env[62627]: DEBUG oslo_concurrency.lockutils [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.329s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1407.623972] env[62627]: DEBUG nova.objects.instance [None req-09ac693c-427f-4d09-9af7-1de94a11b57c tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lazy-loading 'flavor' on Instance uuid 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1407.693400] env[62627]: DEBUG oslo_vmware.api [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195506, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.122861} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1407.693635] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1407.693821] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1407.693997] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1407.712941] env[62627]: INFO nova.scheduler.client.report [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Deleted allocations for instance 8c170084-a7f8-4774-8f9d-7874103b0f4f [ 1408.052037] env[62627]: DEBUG nova.compute.utils [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1408.053385] env[62627]: DEBUG nova.compute.manager [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 6a53e7a7-ec4b-4a97-98f7-8d686df3178c] Allocating IP information in the background. 
{{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1408.053553] env[62627]: DEBUG nova.network.neutron [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 6a53e7a7-ec4b-4a97-98f7-8d686df3178c] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1408.055541] env[62627]: DEBUG nova.objects.instance [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lazy-loading 'migration_context' on Instance uuid 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1408.095394] env[62627]: DEBUG nova.policy [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd847ab3ec20743d0997defea8008448e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '710e7463994b4ec08bcc6d479cd1138b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 1408.220412] env[62627]: DEBUG oslo_concurrency.lockutils [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1408.435292] env[62627]: DEBUG nova.network.neutron [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 6a53e7a7-ec4b-4a97-98f7-8d686df3178c] Successfully created port: 9fa1aa42-f4a9-48e3-946a-48622c860506 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1408.558687] env[62627]: DEBUG nova.compute.manager [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 6a53e7a7-ec4b-4a97-98f7-8d686df3178c] Start building block device mappings for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1408.630977] env[62627]: DEBUG oslo_concurrency.lockutils [None req-09ac693c-427f-4d09-9af7-1de94a11b57c tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lock "6c4ebb14-c3a3-485c-bbb3-405a8b7f92db" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.221s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1408.660114] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-132ca8f4-236e-44c7-a971-04a65c4a08ff {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.667826] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53c9e0ca-e0da-4ac9-8a6d-708d91d1b19b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.699145] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5400f1c2-e02a-4728-8073-45b8a45e9ea6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.706685] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51148618-f424-41a5-b339-831398d0e6f5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.719937] env[62627]: DEBUG nova.compute.provider_tree [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1409.004889] env[62627]: DEBUG nova.compute.manager [req-9fc26cfb-692f-4a88-8794-8f729437b2c9 req-8dd465f1-3c4f-4c6d-9b10-91fd27f0210d service nova] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Received event network-changed-71c1451f-de1d-4700-8326-282bc39bb6f7 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1409.005126] env[62627]: DEBUG nova.compute.manager [req-9fc26cfb-692f-4a88-8794-8f729437b2c9 req-8dd465f1-3c4f-4c6d-9b10-91fd27f0210d service nova] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Refreshing instance network info cache due to event network-changed-71c1451f-de1d-4700-8326-282bc39bb6f7. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1409.005347] env[62627]: DEBUG oslo_concurrency.lockutils [req-9fc26cfb-692f-4a88-8794-8f729437b2c9 req-8dd465f1-3c4f-4c6d-9b10-91fd27f0210d service nova] Acquiring lock "refresh_cache-8c170084-a7f8-4774-8f9d-7874103b0f4f" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1409.006181] env[62627]: DEBUG oslo_concurrency.lockutils [req-9fc26cfb-692f-4a88-8794-8f729437b2c9 req-8dd465f1-3c4f-4c6d-9b10-91fd27f0210d service nova] Acquired lock "refresh_cache-8c170084-a7f8-4774-8f9d-7874103b0f4f" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1409.006181] env[62627]: DEBUG nova.network.neutron [req-9fc26cfb-692f-4a88-8794-8f729437b2c9 req-8dd465f1-3c4f-4c6d-9b10-91fd27f0210d service nova] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Refreshing network info cache for port 71c1451f-de1d-4700-8326-282bc39bb6f7 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1409.225784] env[62627]: DEBUG nova.scheduler.client.report [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1409.384398] env[62627]: DEBUG oslo_concurrency.lockutils [None req-af87af18-8ee2-4106-aecf-6b509af6e7eb tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Acquiring lock "6c4ebb14-c3a3-485c-bbb3-405a8b7f92db" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1409.384657] env[62627]: DEBUG oslo_concurrency.lockutils [None req-af87af18-8ee2-4106-aecf-6b509af6e7eb tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lock "6c4ebb14-c3a3-485c-bbb3-405a8b7f92db" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1409.384859] env[62627]: DEBUG oslo_concurrency.lockutils [None req-af87af18-8ee2-4106-aecf-6b509af6e7eb tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Acquiring lock "6c4ebb14-c3a3-485c-bbb3-405a8b7f92db-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1409.385067] env[62627]: DEBUG oslo_concurrency.lockutils [None req-af87af18-8ee2-4106-aecf-6b509af6e7eb tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lock "6c4ebb14-c3a3-485c-bbb3-405a8b7f92db-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1409.385238] env[62627]: DEBUG oslo_concurrency.lockutils [None req-af87af18-8ee2-4106-aecf-6b509af6e7eb tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lock "6c4ebb14-c3a3-485c-bbb3-405a8b7f92db-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1409.387414] env[62627]: INFO nova.compute.manager [None req-af87af18-8ee2-4106-aecf-6b509af6e7eb tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Terminating instance [ 1409.574163] env[62627]: DEBUG nova.compute.manager [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 6a53e7a7-ec4b-4a97-98f7-8d686df3178c] Start spawning the instance on the hypervisor. {{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1409.600175] env[62627]: DEBUG nova.virt.hardware [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1409.600314] env[62627]: DEBUG nova.virt.hardware [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1409.601033] env[62627]: DEBUG nova.virt.hardware [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1409.601033] env[62627]: DEBUG nova.virt.hardware [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1409.601033] env[62627]: DEBUG nova.virt.hardware [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 
1409.601033] env[62627]: DEBUG nova.virt.hardware [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1409.601238] env[62627]: DEBUG nova.virt.hardware [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1409.601422] env[62627]: DEBUG nova.virt.hardware [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1409.601497] env[62627]: DEBUG nova.virt.hardware [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1409.601663] env[62627]: DEBUG nova.virt.hardware [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1409.601840] env[62627]: DEBUG nova.virt.hardware [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1409.602759] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42ddeb9c-81e0-4296-b540-3d482506430b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.612969] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a41c0f27-ec55-4206-a910-8d25fcd8c970 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.628407] env[62627]: DEBUG oslo_concurrency.lockutils [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquiring lock "8c170084-a7f8-4774-8f9d-7874103b0f4f" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1409.741630] env[62627]: DEBUG nova.network.neutron [req-9fc26cfb-692f-4a88-8794-8f729437b2c9 req-8dd465f1-3c4f-4c6d-9b10-91fd27f0210d service nova] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Updated VIF entry in instance network info cache for port 71c1451f-de1d-4700-8326-282bc39bb6f7. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1409.742015] env[62627]: DEBUG nova.network.neutron [req-9fc26cfb-692f-4a88-8794-8f729437b2c9 req-8dd465f1-3c4f-4c6d-9b10-91fd27f0210d service nova] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Updating instance_info_cache with network_info: [{"id": "71c1451f-de1d-4700-8326-282bc39bb6f7", "address": "fa:16:3e:fb:be:f1", "network": {"id": "e5d8177b-3afb-46cb-88d9-d4777ffa4a39", "bridge": null, "label": "tempest-ServerActionsTestOtherB-1184692155-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.160", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f3a36c557145e38c528a628b1f3b67", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap71c1451f-de", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1409.891342] env[62627]: DEBUG nova.compute.manager [None req-af87af18-8ee2-4106-aecf-6b509af6e7eb tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Start destroying the instance on the hypervisor. {{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1409.891593] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-af87af18-8ee2-4106-aecf-6b509af6e7eb tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1409.892530] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cdbadea-dfeb-4472-8bd7-1d28a0ba6a89 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.900484] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-af87af18-8ee2-4106-aecf-6b509af6e7eb tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1409.900743] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8f95487c-6d2f-4395-80fe-4b7ddeb397de {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.906737] env[62627]: DEBUG oslo_vmware.api [None req-af87af18-8ee2-4106-aecf-6b509af6e7eb tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Waiting for the task: (returnval){ [ 1409.906737] env[62627]: value = "task-2195507" [ 1409.906737] env[62627]: _type = "Task" [ 1409.906737] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1409.914425] env[62627]: DEBUG oslo_vmware.api [None req-af87af18-8ee2-4106-aecf-6b509af6e7eb tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195507, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.951127] env[62627]: DEBUG nova.network.neutron [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 6a53e7a7-ec4b-4a97-98f7-8d686df3178c] Successfully updated port: 9fa1aa42-f4a9-48e3-946a-48622c860506 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1410.235401] env[62627]: DEBUG oslo_concurrency.lockutils [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.686s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1410.241715] env[62627]: DEBUG oslo_concurrency.lockutils [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.022s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1410.241945] env[62627]: DEBUG nova.objects.instance [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lazy-loading 'resources' on Instance uuid 8c170084-a7f8-4774-8f9d-7874103b0f4f {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1410.244632] env[62627]: DEBUG oslo_concurrency.lockutils [req-9fc26cfb-692f-4a88-8794-8f729437b2c9 req-8dd465f1-3c4f-4c6d-9b10-91fd27f0210d service nova] Releasing lock "refresh_cache-8c170084-a7f8-4774-8f9d-7874103b0f4f" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1410.416626] env[62627]: DEBUG oslo_vmware.api [None req-af87af18-8ee2-4106-aecf-6b509af6e7eb tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195507, 'name': PowerOffVM_Task, 'duration_secs': 0.187656} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1410.416806] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-af87af18-8ee2-4106-aecf-6b509af6e7eb tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1410.416971] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-af87af18-8ee2-4106-aecf-6b509af6e7eb tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1410.417240] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c2c8bcf6-59ee-425b-9200-62e647cbb6f4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.457633] env[62627]: DEBUG oslo_concurrency.lockutils [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquiring lock "refresh_cache-6a53e7a7-ec4b-4a97-98f7-8d686df3178c" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1410.457633] env[62627]: DEBUG oslo_concurrency.lockutils [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquired lock "refresh_cache-6a53e7a7-ec4b-4a97-98f7-8d686df3178c" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1410.457633] env[62627]: DEBUG nova.network.neutron [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 6a53e7a7-ec4b-4a97-98f7-8d686df3178c] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1410.476509] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-af87af18-8ee2-4106-aecf-6b509af6e7eb tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1410.476654] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-af87af18-8ee2-4106-aecf-6b509af6e7eb tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1410.476834] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-af87af18-8ee2-4106-aecf-6b509af6e7eb tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Deleting the datastore file [datastore2] 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1410.477113] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-286abc08-c3ce-4cbd-be1e-ffa2500f8b1f {{(pid=62627) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.484035] env[62627]: DEBUG oslo_vmware.api [None req-af87af18-8ee2-4106-aecf-6b509af6e7eb tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Waiting for the task: (returnval){ [ 1410.484035] env[62627]: value = "task-2195509" [ 1410.484035] env[62627]: _type = "Task" [ 1410.484035] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1410.491942] env[62627]: DEBUG oslo_vmware.api [None req-af87af18-8ee2-4106-aecf-6b509af6e7eb tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195509, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.749477] env[62627]: DEBUG nova.objects.instance [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lazy-loading 'numa_topology' on Instance uuid 8c170084-a7f8-4774-8f9d-7874103b0f4f {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1410.990961] env[62627]: DEBUG nova.network.neutron [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 6a53e7a7-ec4b-4a97-98f7-8d686df3178c] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1410.996305] env[62627]: DEBUG oslo_vmware.api [None req-af87af18-8ee2-4106-aecf-6b509af6e7eb tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Task: {'id': task-2195509, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.14684} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1410.996534] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-af87af18-8ee2-4106-aecf-6b509af6e7eb tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1410.996715] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-af87af18-8ee2-4106-aecf-6b509af6e7eb tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1410.996962] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-af87af18-8ee2-4106-aecf-6b509af6e7eb tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1410.997232] env[62627]: INFO nova.compute.manager [None req-af87af18-8ee2-4106-aecf-6b509af6e7eb tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Took 1.11 seconds to destroy the instance on the hypervisor. 
[ 1410.997504] env[62627]: DEBUG oslo.service.loopingcall [None req-af87af18-8ee2-4106-aecf-6b509af6e7eb tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1410.997721] env[62627]: DEBUG nova.compute.manager [-] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1410.997818] env[62627]: DEBUG nova.network.neutron [-] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1411.036249] env[62627]: DEBUG nova.compute.manager [req-5afc53fa-46fa-499a-8b2f-6069ba15d598 req-c6d86fcf-780b-4222-8f8d-f4aca28c7d70 service nova] [instance: 6a53e7a7-ec4b-4a97-98f7-8d686df3178c] Received event network-vif-plugged-9fa1aa42-f4a9-48e3-946a-48622c860506 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1411.036440] env[62627]: DEBUG oslo_concurrency.lockutils [req-5afc53fa-46fa-499a-8b2f-6069ba15d598 req-c6d86fcf-780b-4222-8f8d-f4aca28c7d70 service nova] Acquiring lock "6a53e7a7-ec4b-4a97-98f7-8d686df3178c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1411.036652] env[62627]: DEBUG oslo_concurrency.lockutils [req-5afc53fa-46fa-499a-8b2f-6069ba15d598 req-c6d86fcf-780b-4222-8f8d-f4aca28c7d70 service nova] Lock "6a53e7a7-ec4b-4a97-98f7-8d686df3178c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1411.036821] env[62627]: DEBUG oslo_concurrency.lockutils [req-5afc53fa-46fa-499a-8b2f-6069ba15d598 req-c6d86fcf-780b-4222-8f8d-f4aca28c7d70 service nova] Lock "6a53e7a7-ec4b-4a97-98f7-8d686df3178c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1411.036987] env[62627]: DEBUG nova.compute.manager [req-5afc53fa-46fa-499a-8b2f-6069ba15d598 req-c6d86fcf-780b-4222-8f8d-f4aca28c7d70 service nova] [instance: 6a53e7a7-ec4b-4a97-98f7-8d686df3178c] No waiting events found dispatching network-vif-plugged-9fa1aa42-f4a9-48e3-946a-48622c860506 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1411.037165] env[62627]: WARNING nova.compute.manager [req-5afc53fa-46fa-499a-8b2f-6069ba15d598 req-c6d86fcf-780b-4222-8f8d-f4aca28c7d70 service nova] [instance: 6a53e7a7-ec4b-4a97-98f7-8d686df3178c] Received unexpected event network-vif-plugged-9fa1aa42-f4a9-48e3-946a-48622c860506 for instance with vm_state building and task_state spawning. 
[ 1411.037324] env[62627]: DEBUG nova.compute.manager [req-5afc53fa-46fa-499a-8b2f-6069ba15d598 req-c6d86fcf-780b-4222-8f8d-f4aca28c7d70 service nova] [instance: 6a53e7a7-ec4b-4a97-98f7-8d686df3178c] Received event network-changed-9fa1aa42-f4a9-48e3-946a-48622c860506 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1411.037474] env[62627]: DEBUG nova.compute.manager [req-5afc53fa-46fa-499a-8b2f-6069ba15d598 req-c6d86fcf-780b-4222-8f8d-f4aca28c7d70 service nova] [instance: 6a53e7a7-ec4b-4a97-98f7-8d686df3178c] Refreshing instance network info cache due to event network-changed-9fa1aa42-f4a9-48e3-946a-48622c860506. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1411.037665] env[62627]: DEBUG oslo_concurrency.lockutils [req-5afc53fa-46fa-499a-8b2f-6069ba15d598 req-c6d86fcf-780b-4222-8f8d-f4aca28c7d70 service nova] Acquiring lock "refresh_cache-6a53e7a7-ec4b-4a97-98f7-8d686df3178c" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1411.129401] env[62627]: DEBUG nova.network.neutron [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 6a53e7a7-ec4b-4a97-98f7-8d686df3178c] Updating instance_info_cache with network_info: [{"id": "9fa1aa42-f4a9-48e3-946a-48622c860506", "address": "fa:16:3e:82:cb:d8", "network": {"id": "93a120a3-5be9-4822-960a-7dc2d246a740", "bridge": "br-int", "label": "tempest-ServersTestJSON-1051356861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "710e7463994b4ec08bcc6d479cd1138b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49a94c29-ddd5-4383-9219-1c2c3bb09cc5", "external-id": "nsx-vlan-transportzone-2", "segmentation_id": 2, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9fa1aa42-f4", "ovs_interfaceid": "9fa1aa42-f4a9-48e3-946a-48622c860506", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1411.253709] env[62627]: DEBUG nova.objects.base [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Object Instance<8c170084-a7f8-4774-8f9d-7874103b0f4f> lazy-loaded attributes: resources,numa_topology {{(pid=62627) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1411.379328] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b2d9bf0-19d3-4de8-b785-d4692190ce41 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.388081] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33d31948-798e-4bab-bff1-b736787b257c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.418325] 
env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03b4e27b-ad6e-4c7f-bbca-5f80d02601d3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.427337] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce0c9b1b-4314-4a98-bc6e-3a5eb16a364e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.442711] env[62627]: DEBUG nova.compute.provider_tree [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1411.523210] env[62627]: DEBUG nova.compute.manager [req-64da5e74-7761-4df3-bd68-b5f766f5bd95 req-336d656c-e611-4a08-94c4-2797a1127235 service nova] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Received event network-vif-deleted-e8d180db-2d86-4c84-b9dc-877a37a56877 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1411.523430] env[62627]: INFO nova.compute.manager [req-64da5e74-7761-4df3-bd68-b5f766f5bd95 req-336d656c-e611-4a08-94c4-2797a1127235 service nova] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Neutron deleted interface e8d180db-2d86-4c84-b9dc-877a37a56877; detaching it from the instance and deleting it from the info cache [ 1411.523605] env[62627]: DEBUG nova.network.neutron [req-64da5e74-7761-4df3-bd68-b5f766f5bd95 req-336d656c-e611-4a08-94c4-2797a1127235 service nova] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1411.635581] env[62627]: DEBUG oslo_concurrency.lockutils [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Releasing lock "refresh_cache-6a53e7a7-ec4b-4a97-98f7-8d686df3178c" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1411.635581] env[62627]: DEBUG nova.compute.manager [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 6a53e7a7-ec4b-4a97-98f7-8d686df3178c] Instance network_info: |[{"id": "9fa1aa42-f4a9-48e3-946a-48622c860506", "address": "fa:16:3e:82:cb:d8", "network": {"id": "93a120a3-5be9-4822-960a-7dc2d246a740", "bridge": "br-int", "label": "tempest-ServersTestJSON-1051356861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "710e7463994b4ec08bcc6d479cd1138b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49a94c29-ddd5-4383-9219-1c2c3bb09cc5", "external-id": "nsx-vlan-transportzone-2", "segmentation_id": 2, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9fa1aa42-f4", "ovs_interfaceid": 
"9fa1aa42-f4a9-48e3-946a-48622c860506", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1411.635581] env[62627]: DEBUG oslo_concurrency.lockutils [req-5afc53fa-46fa-499a-8b2f-6069ba15d598 req-c6d86fcf-780b-4222-8f8d-f4aca28c7d70 service nova] Acquired lock "refresh_cache-6a53e7a7-ec4b-4a97-98f7-8d686df3178c" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1411.635581] env[62627]: DEBUG nova.network.neutron [req-5afc53fa-46fa-499a-8b2f-6069ba15d598 req-c6d86fcf-780b-4222-8f8d-f4aca28c7d70 service nova] [instance: 6a53e7a7-ec4b-4a97-98f7-8d686df3178c] Refreshing network info cache for port 9fa1aa42-f4a9-48e3-946a-48622c860506 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1411.635581] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 6a53e7a7-ec4b-4a97-98f7-8d686df3178c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:82:cb:d8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '49a94c29-ddd5-4383-9219-1c2c3bb09cc5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9fa1aa42-f4a9-48e3-946a-48622c860506', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1411.648980] env[62627]: DEBUG oslo.service.loopingcall [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1411.648980] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6a53e7a7-ec4b-4a97-98f7-8d686df3178c] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1411.648980] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-785518c5-be55-4df9-9bdd-80f7deb0a5f6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.669017] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1411.669017] env[62627]: value = "task-2195510" [ 1411.669017] env[62627]: _type = "Task" [ 1411.669017] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1411.678350] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195510, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1411.777851] env[62627]: INFO nova.compute.manager [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Swapping old allocation on dict_keys(['c17e7f32-68d8-4e2f-8979-15d701de02f7']) held by migration 0b97dcea-30da-4655-bf24-00a8e55b165d for instance [ 1411.801192] env[62627]: DEBUG nova.scheduler.client.report [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Overwriting current allocation {'allocations': {'c17e7f32-68d8-4e2f-8979-15d701de02f7': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 174}}, 'project_id': '97c11d791dcf4f94a8e8bda7a6f014cd', 'user_id': 'e72f59cc4a02460088e86ad29af1e0ff', 'consumer_generation': 1} on consumer 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2 {{(pid=62627) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 1411.876305] env[62627]: DEBUG oslo_concurrency.lockutils [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquiring lock "refresh_cache-16ed0637-d24d-488d-b9ba-dd5a49a7b2a2" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1411.876531] env[62627]: DEBUG oslo_concurrency.lockutils [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquired lock "refresh_cache-16ed0637-d24d-488d-b9ba-dd5a49a7b2a2" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1411.876732] env[62627]: DEBUG nova.network.neutron [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1411.946502] env[62627]: DEBUG nova.scheduler.client.report [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1412.005876] env[62627]: DEBUG nova.network.neutron [-] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1412.026286] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-aeb2910b-8171-4069-bf18-0a40d689d050 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.035360] env[62627]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d8df992-5379-424b-afbc-c37776d9fb95 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.061710] env[62627]: DEBUG nova.compute.manager [req-64da5e74-7761-4df3-bd68-b5f766f5bd95 req-336d656c-e611-4a08-94c4-2797a1127235 service nova] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Detach interface failed, port_id=e8d180db-2d86-4c84-b9dc-877a37a56877, reason: Instance 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db could not be found. {{(pid=62627) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1412.179033] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195510, 'name': CreateVM_Task, 'duration_secs': 0.343113} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1412.179172] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6a53e7a7-ec4b-4a97-98f7-8d686df3178c] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1412.179904] env[62627]: DEBUG oslo_concurrency.lockutils [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1412.180556] env[62627]: DEBUG oslo_concurrency.lockutils [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1412.180950] env[62627]: DEBUG oslo_concurrency.lockutils [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1412.181225] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-186b8fd7-6c60-4015-b23a-756023216a15 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.186108] env[62627]: DEBUG oslo_vmware.api [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1412.186108] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]528d752b-8011-0492-5661-8f3954fb1936" [ 1412.186108] env[62627]: _type = "Task" [ 1412.186108] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1412.195645] env[62627]: DEBUG oslo_vmware.api [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]528d752b-8011-0492-5661-8f3954fb1936, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.382148] env[62627]: DEBUG nova.network.neutron [req-5afc53fa-46fa-499a-8b2f-6069ba15d598 req-c6d86fcf-780b-4222-8f8d-f4aca28c7d70 service nova] [instance: 6a53e7a7-ec4b-4a97-98f7-8d686df3178c] Updated VIF entry in instance network info cache for port 9fa1aa42-f4a9-48e3-946a-48622c860506. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1412.382504] env[62627]: DEBUG nova.network.neutron [req-5afc53fa-46fa-499a-8b2f-6069ba15d598 req-c6d86fcf-780b-4222-8f8d-f4aca28c7d70 service nova] [instance: 6a53e7a7-ec4b-4a97-98f7-8d686df3178c] Updating instance_info_cache with network_info: [{"id": "9fa1aa42-f4a9-48e3-946a-48622c860506", "address": "fa:16:3e:82:cb:d8", "network": {"id": "93a120a3-5be9-4822-960a-7dc2d246a740", "bridge": "br-int", "label": "tempest-ServersTestJSON-1051356861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "710e7463994b4ec08bcc6d479cd1138b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49a94c29-ddd5-4383-9219-1c2c3bb09cc5", "external-id": "nsx-vlan-transportzone-2", "segmentation_id": 2, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9fa1aa42-f4", "ovs_interfaceid": "9fa1aa42-f4a9-48e3-946a-48622c860506", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1412.451340] env[62627]: DEBUG oslo_concurrency.lockutils [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.210s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1412.509069] env[62627]: INFO nova.compute.manager [-] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Took 1.51 seconds to deallocate network for instance. 
[ 1412.631204] env[62627]: DEBUG nova.network.neutron [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Updating instance_info_cache with network_info: [{"id": "4cbd817b-c3ef-4a33-8648-4297830602e0", "address": "fa:16:3e:e3:f3:a5", "network": {"id": "da26ed97-a6a7-47f1-b6c0-d1b2f45ad202", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-265963183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.167", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97c11d791dcf4f94a8e8bda7a6f014cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6eaa481-1f92-4851-b98e-09ed0daad7cb", "external-id": "nsx-vlan-transportzone-636", "segmentation_id": 636, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4cbd817b-c3", "ovs_interfaceid": "4cbd817b-c3ef-4a33-8648-4297830602e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1412.696593] env[62627]: DEBUG oslo_vmware.api [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]528d752b-8011-0492-5661-8f3954fb1936, 'name': SearchDatastore_Task, 'duration_secs': 0.009069} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1412.696909] env[62627]: DEBUG oslo_concurrency.lockutils [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1412.697169] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 6a53e7a7-ec4b-4a97-98f7-8d686df3178c] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1412.697407] env[62627]: DEBUG oslo_concurrency.lockutils [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1412.697554] env[62627]: DEBUG oslo_concurrency.lockutils [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1412.697733] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1412.698023] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-718603e9-7032-47d4-ab19-46465aada949 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.705830] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1412.706074] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1412.707137] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2d0b00b7-e652-4011-83ef-718426347dc6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.712239] env[62627]: DEBUG oslo_vmware.api [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1412.712239] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52126874-b4f6-d582-8619-81cb74554897" [ 1412.712239] env[62627]: _type = "Task" [ 1412.712239] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1412.719520] env[62627]: DEBUG oslo_vmware.api [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52126874-b4f6-d582-8619-81cb74554897, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.885087] env[62627]: DEBUG oslo_concurrency.lockutils [req-5afc53fa-46fa-499a-8b2f-6069ba15d598 req-c6d86fcf-780b-4222-8f8d-f4aca28c7d70 service nova] Releasing lock "refresh_cache-6a53e7a7-ec4b-4a97-98f7-8d686df3178c" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1412.959300] env[62627]: DEBUG oslo_concurrency.lockutils [None req-78948ae5-b6d8-42ec-b2af-4f985b81c487 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lock "8c170084-a7f8-4774-8f9d-7874103b0f4f" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 22.303s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1412.960406] env[62627]: DEBUG oslo_concurrency.lockutils [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lock "8c170084-a7f8-4774-8f9d-7874103b0f4f" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 3.332s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1412.960406] env[62627]: INFO nova.compute.manager [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Unshelving [ 1413.014920] env[62627]: DEBUG oslo_concurrency.lockutils [None req-af87af18-8ee2-4106-aecf-6b509af6e7eb tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1413.015203] env[62627]: DEBUG oslo_concurrency.lockutils [None req-af87af18-8ee2-4106-aecf-6b509af6e7eb tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: 
waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1413.015419] env[62627]: DEBUG nova.objects.instance [None req-af87af18-8ee2-4106-aecf-6b509af6e7eb tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lazy-loading 'resources' on Instance uuid 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1413.134890] env[62627]: DEBUG oslo_concurrency.lockutils [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Releasing lock "refresh_cache-16ed0637-d24d-488d-b9ba-dd5a49a7b2a2" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1413.135405] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1413.135643] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-05b0689b-7ca3-4fba-9b03-bcf6c7dcf549 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.143261] env[62627]: DEBUG oslo_vmware.api [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 1413.143261] env[62627]: value = "task-2195511" [ 1413.143261] env[62627]: _type = "Task" [ 1413.143261] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1413.154872] env[62627]: DEBUG oslo_vmware.api [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195511, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1413.222931] env[62627]: DEBUG oslo_vmware.api [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52126874-b4f6-d582-8619-81cb74554897, 'name': SearchDatastore_Task, 'duration_secs': 0.007637} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1413.223725] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a694fa62-13f9-46a2-a558-aa02d6690f9d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.229395] env[62627]: DEBUG oslo_vmware.api [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1413.229395] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52722cd9-a694-d653-d4c0-08c8835c3ef0" [ 1413.229395] env[62627]: _type = "Task" [ 1413.229395] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1413.237833] env[62627]: DEBUG oslo_vmware.api [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52722cd9-a694-d653-d4c0-08c8835c3ef0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1413.602542] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f3d38a4-4d78-47e1-8535-e8d69b4ac9b2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.610268] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d260b1c-8234-403d-b4d1-f1d8a11532db {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.640791] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-848dae0d-73a5-4a19-bd38-a9a14f279f60 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.650395] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f5dda88-7222-4053-b88b-38426cd08c02 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.656565] env[62627]: DEBUG oslo_vmware.api [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195511, 'name': PowerOffVM_Task, 'duration_secs': 0.272942} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1413.657133] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1413.657841] env[62627]: DEBUG nova.virt.hardware [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1413.658066] env[62627]: DEBUG nova.virt.hardware [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1413.658228] env[62627]: DEBUG nova.virt.hardware [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1413.658410] env[62627]: DEBUG nova.virt.hardware [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1413.658555] env[62627]: DEBUG nova.virt.hardware [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1413.658702] env[62627]: DEBUG nova.virt.hardware [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1413.658912] env[62627]: DEBUG nova.virt.hardware [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1413.659083] env[62627]: DEBUG nova.virt.hardware [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 
tempest-ServerActionsTestJSON-1886905081-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1413.659249] env[62627]: DEBUG nova.virt.hardware [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1413.659409] env[62627]: DEBUG nova.virt.hardware [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1413.659579] env[62627]: DEBUG nova.virt.hardware [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1413.673263] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-32268bbc-aaf1-4631-b787-607cd1117add {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.683190] env[62627]: DEBUG nova.compute.provider_tree [None req-af87af18-8ee2-4106-aecf-6b509af6e7eb tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1413.691685] env[62627]: DEBUG oslo_vmware.api [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 1413.691685] env[62627]: value = "task-2195512" [ 1413.691685] env[62627]: _type = "Task" [ 1413.691685] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1413.700927] env[62627]: DEBUG oslo_vmware.api [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195512, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1413.739365] env[62627]: DEBUG oslo_vmware.api [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52722cd9-a694-d653-d4c0-08c8835c3ef0, 'name': SearchDatastore_Task, 'duration_secs': 0.009677} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1413.739674] env[62627]: DEBUG oslo_concurrency.lockutils [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1413.740275] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 6a53e7a7-ec4b-4a97-98f7-8d686df3178c/6a53e7a7-ec4b-4a97-98f7-8d686df3178c.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1413.740275] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-30f24a26-7e74-4de1-a385-c3f6b7f326f2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.748380] env[62627]: DEBUG oslo_vmware.api [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1413.748380] env[62627]: value = "task-2195513" [ 1413.748380] env[62627]: _type = "Task" [ 1413.748380] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1413.756103] env[62627]: DEBUG oslo_vmware.api [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195513, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1414.085458] env[62627]: DEBUG oslo_concurrency.lockutils [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1414.186845] env[62627]: DEBUG nova.scheduler.client.report [None req-af87af18-8ee2-4106-aecf-6b509af6e7eb tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1414.202226] env[62627]: DEBUG oslo_vmware.api [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195512, 'name': ReconfigVM_Task, 'duration_secs': 0.162435} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1414.203975] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e56f8d7-56b5-4a81-86df-0f60be622bfb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.223677] env[62627]: DEBUG nova.virt.hardware [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1414.223839] env[62627]: DEBUG nova.virt.hardware [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1414.224009] env[62627]: DEBUG nova.virt.hardware [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1414.224203] env[62627]: DEBUG nova.virt.hardware [None 
req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1414.224353] env[62627]: DEBUG nova.virt.hardware [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1414.224501] env[62627]: DEBUG nova.virt.hardware [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1414.224700] env[62627]: DEBUG nova.virt.hardware [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1414.224856] env[62627]: DEBUG nova.virt.hardware [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1414.225033] env[62627]: DEBUG nova.virt.hardware [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1414.225202] env[62627]: DEBUG nova.virt.hardware [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1414.225372] env[62627]: DEBUG nova.virt.hardware [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1414.226129] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-041c8a8f-4e2b-4773-86b0-9e180b3e37af {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.231919] env[62627]: DEBUG oslo_vmware.api [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 1414.231919] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52a70388-073e-1292-5e46-37a93832ecf8" [ 1414.231919] env[62627]: _type = "Task" [ 1414.231919] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1414.239953] env[62627]: DEBUG oslo_vmware.api [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52a70388-073e-1292-5e46-37a93832ecf8, 'name': SearchDatastore_Task, 'duration_secs': 0.006283} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1414.245096] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Reconfiguring VM instance instance-00000070 to detach disk 2000 {{(pid=62627) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1414.245337] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b2552775-c237-424a-91b1-e388e40a3b77 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.265052] env[62627]: DEBUG oslo_vmware.api [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195513, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1414.266200] env[62627]: DEBUG oslo_vmware.api [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 1414.266200] env[62627]: value = "task-2195514" [ 1414.266200] env[62627]: _type = "Task" [ 1414.266200] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1414.276169] env[62627]: DEBUG oslo_vmware.api [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195514, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1414.692543] env[62627]: DEBUG oslo_concurrency.lockutils [None req-af87af18-8ee2-4106-aecf-6b509af6e7eb tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.677s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1414.694865] env[62627]: DEBUG oslo_concurrency.lockutils [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.610s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1414.695109] env[62627]: DEBUG nova.objects.instance [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lazy-loading 'pci_requests' on Instance uuid 8c170084-a7f8-4774-8f9d-7874103b0f4f {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1414.709609] env[62627]: INFO nova.scheduler.client.report [None req-af87af18-8ee2-4106-aecf-6b509af6e7eb tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Deleted allocations for instance 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db [ 1414.767717] env[62627]: DEBUG oslo_vmware.api [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195513, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1414.775101] env[62627]: DEBUG oslo_vmware.api [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195514, 'name': ReconfigVM_Task, 'duration_secs': 0.188122} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1414.775366] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Reconfigured VM instance instance-00000070 to detach disk 2000 {{(pid=62627) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1414.776114] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81798f19-d178-414b-9d44-fd7498bb3b19 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.799307] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Reconfiguring VM instance instance-00000070 to attach disk [datastore2] 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2/16ed0637-d24d-488d-b9ba-dd5a49a7b2a2.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1414.799546] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c5aba60d-cda1-4132-958d-7a8459fa01a1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.818048] env[62627]: DEBUG oslo_vmware.api [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 1414.818048] env[62627]: value = "task-2195515" [ 1414.818048] env[62627]: _type = "Task" [ 1414.818048] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1414.826283] env[62627]: DEBUG oslo_vmware.api [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195515, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.199957] env[62627]: DEBUG nova.objects.instance [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lazy-loading 'numa_topology' on Instance uuid 8c170084-a7f8-4774-8f9d-7874103b0f4f {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1415.217358] env[62627]: DEBUG oslo_concurrency.lockutils [None req-af87af18-8ee2-4106-aecf-6b509af6e7eb tempest-AttachVolumeNegativeTest-1428764152 tempest-AttachVolumeNegativeTest-1428764152-project-member] Lock "6c4ebb14-c3a3-485c-bbb3-405a8b7f92db" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.833s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1415.267589] env[62627]: DEBUG oslo_vmware.api [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195513, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.45973} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1415.267846] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 6a53e7a7-ec4b-4a97-98f7-8d686df3178c/6a53e7a7-ec4b-4a97-98f7-8d686df3178c.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1415.268067] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 6a53e7a7-ec4b-4a97-98f7-8d686df3178c] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1415.268313] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6757e443-7d7a-478a-9be9-e13eb761bffa {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.275302] env[62627]: DEBUG oslo_vmware.api [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1415.275302] env[62627]: value = "task-2195516" [ 1415.275302] env[62627]: _type = "Task" [ 1415.275302] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1415.282287] env[62627]: DEBUG oslo_vmware.api [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195516, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.326750] env[62627]: DEBUG oslo_vmware.api [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195515, 'name': ReconfigVM_Task, 'duration_secs': 0.250939} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1415.327015] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Reconfigured VM instance instance-00000070 to attach disk [datastore2] 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2/16ed0637-d24d-488d-b9ba-dd5a49a7b2a2.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1415.327861] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-325c5597-b9e5-44fb-b08c-07178ca2644e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.345530] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcfcf14a-b74f-4639-83aa-4b57ccc031af {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.363068] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3716e2c-6b21-469b-99a4-e28e66523513 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.380158] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc34bf7f-2409-4d45-aa1f-4f0a26c4ff05 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.386176] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1415.386389] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f5202853-fe92-404f-8a01-6075d2f5841f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.391698] env[62627]: DEBUG oslo_vmware.api [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 1415.391698] env[62627]: value = "task-2195517" [ 1415.391698] env[62627]: _type = "Task" [ 1415.391698] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1415.399755] env[62627]: DEBUG oslo_vmware.api [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195517, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.702712] env[62627]: INFO nova.compute.claims [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1415.784985] env[62627]: DEBUG oslo_vmware.api [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195516, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065709} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1415.785345] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 6a53e7a7-ec4b-4a97-98f7-8d686df3178c] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1415.786157] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97d5eb01-a1ce-43e5-9174-3c9d2df791e2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.808451] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 6a53e7a7-ec4b-4a97-98f7-8d686df3178c] Reconfiguring VM instance instance-00000077 to attach disk [datastore2] 6a53e7a7-ec4b-4a97-98f7-8d686df3178c/6a53e7a7-ec4b-4a97-98f7-8d686df3178c.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1415.809026] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1a0167ca-2ced-404d-ba88-03d711413972 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.827732] env[62627]: DEBUG oslo_vmware.api [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1415.827732] env[62627]: value = "task-2195518" [ 1415.827732] env[62627]: _type = "Task" [ 1415.827732] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1415.836564] env[62627]: DEBUG oslo_vmware.api [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195518, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.901424] env[62627]: DEBUG oslo_vmware.api [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195517, 'name': PowerOnVM_Task, 'duration_secs': 0.392615} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1415.901759] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1416.337461] env[62627]: DEBUG oslo_vmware.api [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195518, 'name': ReconfigVM_Task, 'duration_secs': 0.299672} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1416.337810] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 6a53e7a7-ec4b-4a97-98f7-8d686df3178c] Reconfigured VM instance instance-00000077 to attach disk [datastore2] 6a53e7a7-ec4b-4a97-98f7-8d686df3178c/6a53e7a7-ec4b-4a97-98f7-8d686df3178c.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1416.338512] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8b4d3b1e-69a9-4aed-a7e7-7cc28d2f5792 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.344863] env[62627]: DEBUG oslo_vmware.api [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1416.344863] env[62627]: value = "task-2195520" [ 1416.344863] env[62627]: _type = "Task" [ 1416.344863] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1416.353338] env[62627]: DEBUG oslo_vmware.api [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195520, 'name': Rename_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.781791] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd1edd8c-b804-412d-883d-c15e62b39aec {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.789364] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1653dc59-3b39-4063-8703-c61501c004c4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.819405] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25778c8a-b573-40a1-b05c-09e2166f1367 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.826165] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee86d619-3617-4d2b-81a6-07144c2e66cc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.838867] env[62627]: DEBUG nova.compute.provider_tree [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1416.853722] env[62627]: DEBUG oslo_vmware.api [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195520, 'name': Rename_Task, 'duration_secs': 0.139126} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1416.853978] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 6a53e7a7-ec4b-4a97-98f7-8d686df3178c] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1416.854222] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e7c00aff-d7db-4f86-b3af-344e0f432a47 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.860286] env[62627]: DEBUG oslo_vmware.api [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1416.860286] env[62627]: value = "task-2195521" [ 1416.860286] env[62627]: _type = "Task" [ 1416.860286] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1416.867268] env[62627]: DEBUG oslo_vmware.api [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195521, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.913225] env[62627]: INFO nova.compute.manager [None req-059848ef-d58e-4b19-ae75-c5bbb2e23fa4 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Updating instance to original state: 'active' [ 1417.341864] env[62627]: DEBUG nova.scheduler.client.report [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1417.370698] env[62627]: DEBUG oslo_vmware.api [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195521, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.847102] env[62627]: DEBUG oslo_concurrency.lockutils [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.152s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1417.870477] env[62627]: DEBUG oslo_vmware.api [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195521, 'name': PowerOnVM_Task, 'duration_secs': 0.51129} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1417.870765] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 6a53e7a7-ec4b-4a97-98f7-8d686df3178c] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1417.870951] env[62627]: INFO nova.compute.manager [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 6a53e7a7-ec4b-4a97-98f7-8d686df3178c] Took 8.30 seconds to spawn the instance on the hypervisor. 
[ 1417.871155] env[62627]: DEBUG nova.compute.manager [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 6a53e7a7-ec4b-4a97-98f7-8d686df3178c] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1417.871938] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66ea7873-34dd-462c-8ae5-83dd25e6689d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.875606] env[62627]: INFO nova.network.neutron [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Updating port 71c1451f-de1d-4700-8326-282bc39bb6f7 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1417.932142] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5e8ff953-24cd-4fc1-b1b7-56f3354cb292 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquiring lock "16ed0637-d24d-488d-b9ba-dd5a49a7b2a2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1417.932396] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5e8ff953-24cd-4fc1-b1b7-56f3354cb292 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lock "16ed0637-d24d-488d-b9ba-dd5a49a7b2a2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1417.932529] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5e8ff953-24cd-4fc1-b1b7-56f3354cb292 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquiring lock "16ed0637-d24d-488d-b9ba-dd5a49a7b2a2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1417.932715] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5e8ff953-24cd-4fc1-b1b7-56f3354cb292 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lock "16ed0637-d24d-488d-b9ba-dd5a49a7b2a2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1417.932885] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5e8ff953-24cd-4fc1-b1b7-56f3354cb292 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lock "16ed0637-d24d-488d-b9ba-dd5a49a7b2a2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1417.935564] env[62627]: INFO nova.compute.manager [None req-5e8ff953-24cd-4fc1-b1b7-56f3354cb292 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Terminating instance [ 
1418.388429] env[62627]: INFO nova.compute.manager [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 6a53e7a7-ec4b-4a97-98f7-8d686df3178c] Took 13.03 seconds to build instance. [ 1418.440306] env[62627]: DEBUG nova.compute.manager [None req-5e8ff953-24cd-4fc1-b1b7-56f3354cb292 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Start destroying the instance on the hypervisor. {{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1418.440498] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-5e8ff953-24cd-4fc1-b1b7-56f3354cb292 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1418.441421] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95227d1b-2d2c-4366-9263-b817a4d87016 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.449421] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e8ff953-24cd-4fc1-b1b7-56f3354cb292 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1418.449650] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7dea69d0-1d81-465e-b45b-61eb4088b092 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.455742] env[62627]: DEBUG oslo_vmware.api [None req-5e8ff953-24cd-4fc1-b1b7-56f3354cb292 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 1418.455742] env[62627]: value = "task-2195523" [ 1418.455742] env[62627]: _type = "Task" [ 1418.455742] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1418.463469] env[62627]: DEBUG oslo_vmware.api [None req-5e8ff953-24cd-4fc1-b1b7-56f3354cb292 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195523, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.890405] env[62627]: DEBUG oslo_concurrency.lockutils [None req-20b229a4-0ab8-4a22-bd0c-0f43380df851 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "6a53e7a7-ec4b-4a97-98f7-8d686df3178c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.546s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1418.966468] env[62627]: DEBUG oslo_vmware.api [None req-5e8ff953-24cd-4fc1-b1b7-56f3354cb292 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195523, 'name': PowerOffVM_Task, 'duration_secs': 0.213054} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1418.966766] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e8ff953-24cd-4fc1-b1b7-56f3354cb292 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1418.966958] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-5e8ff953-24cd-4fc1-b1b7-56f3354cb292 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1418.967230] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8636da36-2caa-4e2e-81c7-131c6345ce50 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.023532] env[62627]: DEBUG oslo_concurrency.lockutils [None req-16b7a775-f947-4637-b63b-6e0930404f77 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquiring lock "6a53e7a7-ec4b-4a97-98f7-8d686df3178c" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1419.023784] env[62627]: DEBUG oslo_concurrency.lockutils [None req-16b7a775-f947-4637-b63b-6e0930404f77 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "6a53e7a7-ec4b-4a97-98f7-8d686df3178c" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1419.023960] env[62627]: DEBUG nova.compute.manager [None req-16b7a775-f947-4637-b63b-6e0930404f77 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 6a53e7a7-ec4b-4a97-98f7-8d686df3178c] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1419.024906] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f65b6ef-a561-4247-8630-2b6b862b0bb0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.031387] env[62627]: DEBUG nova.compute.manager [None req-16b7a775-f947-4637-b63b-6e0930404f77 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 6a53e7a7-ec4b-4a97-98f7-8d686df3178c] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62627) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1419.031937] env[62627]: DEBUG nova.objects.instance [None req-16b7a775-f947-4637-b63b-6e0930404f77 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lazy-loading 'flavor' on Instance uuid 6a53e7a7-ec4b-4a97-98f7-8d686df3178c {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1419.034902] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-5e8ff953-24cd-4fc1-b1b7-56f3354cb292 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 
16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1419.035106] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-5e8ff953-24cd-4fc1-b1b7-56f3354cb292 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1419.035285] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e8ff953-24cd-4fc1-b1b7-56f3354cb292 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Deleting the datastore file [datastore2] 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1419.035511] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c7dc4ce8-c12a-46fd-9194-f66ac45cb042 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.041615] env[62627]: DEBUG oslo_vmware.api [None req-5e8ff953-24cd-4fc1-b1b7-56f3354cb292 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 1419.041615] env[62627]: value = "task-2195525" [ 1419.041615] env[62627]: _type = "Task" [ 1419.041615] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.050482] env[62627]: DEBUG oslo_vmware.api [None req-5e8ff953-24cd-4fc1-b1b7-56f3354cb292 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195525, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.297593] env[62627]: DEBUG nova.compute.manager [req-efae1d45-4dc6-4e37-8d92-1bbcdba5ff12 req-716ee210-219f-4d3d-91ed-bc299f654f10 service nova] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Received event network-vif-plugged-71c1451f-de1d-4700-8326-282bc39bb6f7 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1419.297838] env[62627]: DEBUG oslo_concurrency.lockutils [req-efae1d45-4dc6-4e37-8d92-1bbcdba5ff12 req-716ee210-219f-4d3d-91ed-bc299f654f10 service nova] Acquiring lock "8c170084-a7f8-4774-8f9d-7874103b0f4f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1419.297953] env[62627]: DEBUG oslo_concurrency.lockutils [req-efae1d45-4dc6-4e37-8d92-1bbcdba5ff12 req-716ee210-219f-4d3d-91ed-bc299f654f10 service nova] Lock "8c170084-a7f8-4774-8f9d-7874103b0f4f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1419.298167] env[62627]: DEBUG oslo_concurrency.lockutils [req-efae1d45-4dc6-4e37-8d92-1bbcdba5ff12 req-716ee210-219f-4d3d-91ed-bc299f654f10 service nova] Lock "8c170084-a7f8-4774-8f9d-7874103b0f4f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1419.298372] env[62627]: DEBUG nova.compute.manager [req-efae1d45-4dc6-4e37-8d92-1bbcdba5ff12 req-716ee210-219f-4d3d-91ed-bc299f654f10 service nova] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] No waiting events found dispatching network-vif-plugged-71c1451f-de1d-4700-8326-282bc39bb6f7 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1419.298529] env[62627]: WARNING nova.compute.manager [req-efae1d45-4dc6-4e37-8d92-1bbcdba5ff12 req-716ee210-219f-4d3d-91ed-bc299f654f10 service nova] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Received unexpected event network-vif-plugged-71c1451f-de1d-4700-8326-282bc39bb6f7 for instance with vm_state shelved_offloaded and task_state spawning. 
[ 1419.378706] env[62627]: DEBUG oslo_concurrency.lockutils [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquiring lock "refresh_cache-8c170084-a7f8-4774-8f9d-7874103b0f4f" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1419.378880] env[62627]: DEBUG oslo_concurrency.lockutils [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquired lock "refresh_cache-8c170084-a7f8-4774-8f9d-7874103b0f4f" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1419.379062] env[62627]: DEBUG nova.network.neutron [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1419.551723] env[62627]: DEBUG oslo_vmware.api [None req-5e8ff953-24cd-4fc1-b1b7-56f3354cb292 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195525, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.14717} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.552114] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e8ff953-24cd-4fc1-b1b7-56f3354cb292 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1419.552157] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-5e8ff953-24cd-4fc1-b1b7-56f3354cb292 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1419.552331] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-5e8ff953-24cd-4fc1-b1b7-56f3354cb292 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1419.552501] env[62627]: INFO nova.compute.manager [None req-5e8ff953-24cd-4fc1-b1b7-56f3354cb292 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1419.552736] env[62627]: DEBUG oslo.service.loopingcall [None req-5e8ff953-24cd-4fc1-b1b7-56f3354cb292 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1419.552914] env[62627]: DEBUG nova.compute.manager [-] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1419.553056] env[62627]: DEBUG nova.network.neutron [-] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1420.039676] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-16b7a775-f947-4637-b63b-6e0930404f77 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 6a53e7a7-ec4b-4a97-98f7-8d686df3178c] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1420.040040] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-495e677f-fad7-4637-adbd-b805edf298b4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.047341] env[62627]: DEBUG oslo_vmware.api [None req-16b7a775-f947-4637-b63b-6e0930404f77 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1420.047341] env[62627]: value = "task-2195526" [ 1420.047341] env[62627]: _type = "Task" [ 1420.047341] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1420.056060] env[62627]: DEBUG oslo_vmware.api [None req-16b7a775-f947-4637-b63b-6e0930404f77 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195526, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.113249] env[62627]: DEBUG nova.network.neutron [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Updating instance_info_cache with network_info: [{"id": "71c1451f-de1d-4700-8326-282bc39bb6f7", "address": "fa:16:3e:fb:be:f1", "network": {"id": "e5d8177b-3afb-46cb-88d9-d4777ffa4a39", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1184692155-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.160", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f3a36c557145e38c528a628b1f3b67", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a44713-0af1-486e-bc0d-00e03a769fa4", "external-id": "nsx-vlan-transportzone-420", "segmentation_id": 420, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71c1451f-de", "ovs_interfaceid": "71c1451f-de1d-4700-8326-282bc39bb6f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1420.490630] env[62627]: DEBUG nova.network.neutron [-] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1420.557019] env[62627]: DEBUG oslo_vmware.api [None req-16b7a775-f947-4637-b63b-6e0930404f77 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195526, 'name': PowerOffVM_Task, 'duration_secs': 0.191736} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1420.557309] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-16b7a775-f947-4637-b63b-6e0930404f77 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 6a53e7a7-ec4b-4a97-98f7-8d686df3178c] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1420.557482] env[62627]: DEBUG nova.compute.manager [None req-16b7a775-f947-4637-b63b-6e0930404f77 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 6a53e7a7-ec4b-4a97-98f7-8d686df3178c] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1420.558366] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e018a1d8-5b90-41a7-9625-1858203aef07 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.616510] env[62627]: DEBUG oslo_concurrency.lockutils [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Releasing lock "refresh_cache-8c170084-a7f8-4774-8f9d-7874103b0f4f" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1420.644705] env[62627]: DEBUG nova.virt.hardware [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='9dfa27088cb2bd1ed7408c3a571f4127',container_format='bare',created_at=2025-01-30T20:41:59Z,direct_url=,disk_format='vmdk',id=96863610-a259-42f5-aff5-eef943337e28,min_disk=1,min_ram=0,name='tempest-ServerActionsTestOtherB-server-1590779513-shelved',owner='a9f3a36c557145e38c528a628b1f3b67',properties=ImageMetaProps,protected=,size=31670784,status='active',tags=,updated_at=2025-01-30T20:42:13Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1420.644996] env[62627]: DEBUG nova.virt.hardware [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1420.645213] env[62627]: DEBUG nova.virt.hardware [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1420.645434] env[62627]: DEBUG nova.virt.hardware [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1420.645614] env[62627]: DEBUG nova.virt.hardware [None req-95505125-9369-48fd-90ed-b2294140cdf5 
tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1420.645791] env[62627]: DEBUG nova.virt.hardware [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1420.646043] env[62627]: DEBUG nova.virt.hardware [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1420.646245] env[62627]: DEBUG nova.virt.hardware [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1420.646429] env[62627]: DEBUG nova.virt.hardware [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1420.646597] env[62627]: DEBUG nova.virt.hardware [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1420.646771] env[62627]: DEBUG nova.virt.hardware [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1420.647706] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-260e8ed8-ca7c-426b-b04f-0b162dbcaa9b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.655613] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1866b62d-c226-40dd-a229-d92e44495870 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.669174] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fb:be:f1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e7a44713-0af1-486e-bc0d-00e03a769fa4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '71c1451f-de1d-4700-8326-282bc39bb6f7', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 
1420.677171] env[62627]: DEBUG oslo.service.loopingcall [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1420.677436] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1420.677714] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f1b14f9d-411b-4316-a602-3567b6d3a1af {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.695874] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1420.695874] env[62627]: value = "task-2195527" [ 1420.695874] env[62627]: _type = "Task" [ 1420.695874] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1420.702970] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195527, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.993118] env[62627]: INFO nova.compute.manager [-] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Took 1.44 seconds to deallocate network for instance. [ 1421.069226] env[62627]: DEBUG oslo_concurrency.lockutils [None req-16b7a775-f947-4637-b63b-6e0930404f77 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "6a53e7a7-ec4b-4a97-98f7-8d686df3178c" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.045s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1421.206680] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195527, 'name': CreateVM_Task, 'duration_secs': 0.290133} completed successfully. 
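CreateVM_Task above follows the same invoke-then-poll pattern as every other vSphere call in this log: the API returns a Task managed object immediately, and oslo.vmware polls it until it reports completion (the "_poll_task ... progress is N%" lines). A minimal sketch of that pattern, assuming the usual oslo.vmware session API; the vCenter endpoint, credentials and VM reference are placeholders, not values from this log.

from oslo_vmware import api as vmware_api

# Placeholder endpoint and credentials; constructing the session logs in,
# as seen earlier in this log ("Successfully established new session").
session = vmware_api.VMwareAPISession(
    'vcenter.example.org', 'user', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

def power_off(vm_ref):
    # PowerOffVM_Task (like CreateVM_Task) returns a task reference right
    # away; wait_for_task() polls it, producing the "_poll_task ... progress"
    # DEBUG lines until the task completes or raises.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)
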
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1421.206858] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1421.207554] env[62627]: DEBUG oslo_concurrency.lockutils [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/96863610-a259-42f5-aff5-eef943337e28" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1421.207723] env[62627]: DEBUG oslo_concurrency.lockutils [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquired lock "[datastore2] devstack-image-cache_base/96863610-a259-42f5-aff5-eef943337e28" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1421.208134] env[62627]: DEBUG oslo_concurrency.lockutils [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/96863610-a259-42f5-aff5-eef943337e28" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1421.208395] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-393b3648-6cb7-4e0b-993d-d937c69c695f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.213471] env[62627]: DEBUG oslo_vmware.api [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1421.213471] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52f1aaf7-d248-7e5f-fe18-b9b9954f4795" [ 1421.213471] env[62627]: _type = "Task" [ 1421.213471] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1421.221108] env[62627]: DEBUG oslo_vmware.api [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52f1aaf7-d248-7e5f-fe18-b9b9954f4795, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.327629] env[62627]: DEBUG nova.compute.manager [req-9237806f-d432-4ad7-b6d5-e621f4b23b72 req-7d522a48-802a-4e86-ae97-49a0508f87d1 service nova] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Received event network-changed-71c1451f-de1d-4700-8326-282bc39bb6f7 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1421.327812] env[62627]: DEBUG nova.compute.manager [req-9237806f-d432-4ad7-b6d5-e621f4b23b72 req-7d522a48-802a-4e86-ae97-49a0508f87d1 service nova] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Refreshing instance network info cache due to event network-changed-71c1451f-de1d-4700-8326-282bc39bb6f7. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1421.328022] env[62627]: DEBUG oslo_concurrency.lockutils [req-9237806f-d432-4ad7-b6d5-e621f4b23b72 req-7d522a48-802a-4e86-ae97-49a0508f87d1 service nova] Acquiring lock "refresh_cache-8c170084-a7f8-4774-8f9d-7874103b0f4f" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1421.328163] env[62627]: DEBUG oslo_concurrency.lockutils [req-9237806f-d432-4ad7-b6d5-e621f4b23b72 req-7d522a48-802a-4e86-ae97-49a0508f87d1 service nova] Acquired lock "refresh_cache-8c170084-a7f8-4774-8f9d-7874103b0f4f" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1421.328327] env[62627]: DEBUG nova.network.neutron [req-9237806f-d432-4ad7-b6d5-e621f4b23b72 req-7d522a48-802a-4e86-ae97-49a0508f87d1 service nova] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Refreshing network info cache for port 71c1451f-de1d-4700-8326-282bc39bb6f7 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1421.500911] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5e8ff953-24cd-4fc1-b1b7-56f3354cb292 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1421.501211] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5e8ff953-24cd-4fc1-b1b7-56f3354cb292 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1421.501355] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5e8ff953-24cd-4fc1-b1b7-56f3354cb292 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1421.535892] env[62627]: INFO nova.scheduler.client.report [None req-5e8ff953-24cd-4fc1-b1b7-56f3354cb292 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Deleted allocations for instance 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2 [ 1421.672442] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0ab48932-56c7-491b-8c4e-7e4f593c6f58 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquiring lock "6a53e7a7-ec4b-4a97-98f7-8d686df3178c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1421.672915] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0ab48932-56c7-491b-8c4e-7e4f593c6f58 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "6a53e7a7-ec4b-4a97-98f7-8d686df3178c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1421.672915] env[62627]: 
DEBUG oslo_concurrency.lockutils [None req-0ab48932-56c7-491b-8c4e-7e4f593c6f58 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquiring lock "6a53e7a7-ec4b-4a97-98f7-8d686df3178c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1421.673093] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0ab48932-56c7-491b-8c4e-7e4f593c6f58 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "6a53e7a7-ec4b-4a97-98f7-8d686df3178c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1421.673265] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0ab48932-56c7-491b-8c4e-7e4f593c6f58 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "6a53e7a7-ec4b-4a97-98f7-8d686df3178c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1421.675346] env[62627]: INFO nova.compute.manager [None req-0ab48932-56c7-491b-8c4e-7e4f593c6f58 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 6a53e7a7-ec4b-4a97-98f7-8d686df3178c] Terminating instance [ 1421.723593] env[62627]: DEBUG oslo_concurrency.lockutils [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Releasing lock "[datastore2] devstack-image-cache_base/96863610-a259-42f5-aff5-eef943337e28" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1421.723813] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Processing image 96863610-a259-42f5-aff5-eef943337e28 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1421.724052] env[62627]: DEBUG oslo_concurrency.lockutils [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/96863610-a259-42f5-aff5-eef943337e28/96863610-a259-42f5-aff5-eef943337e28.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1421.724209] env[62627]: DEBUG oslo_concurrency.lockutils [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquired lock "[datastore2] devstack-image-cache_base/96863610-a259-42f5-aff5-eef943337e28/96863610-a259-42f5-aff5-eef943337e28.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1421.724387] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Creating directory with path [datastore2] devstack-image-cache_base 
{{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1421.724609] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-607c1876-023d-404f-9488-8280ecac0e96 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.732534] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1421.732721] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1421.733384] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4e220eff-6124-4ba1-95be-76cca3365c7d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.738157] env[62627]: DEBUG oslo_vmware.api [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1421.738157] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5260a943-1bdc-45be-7990-adb0a07cad6e" [ 1421.738157] env[62627]: _type = "Task" [ 1421.738157] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1421.744987] env[62627]: DEBUG oslo_vmware.api [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5260a943-1bdc-45be-7990-adb0a07cad6e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.045366] env[62627]: DEBUG oslo_concurrency.lockutils [None req-5e8ff953-24cd-4fc1-b1b7-56f3354cb292 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lock "16ed0637-d24d-488d-b9ba-dd5a49a7b2a2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.113s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1422.113893] env[62627]: DEBUG nova.network.neutron [req-9237806f-d432-4ad7-b6d5-e621f4b23b72 req-7d522a48-802a-4e86-ae97-49a0508f87d1 service nova] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Updated VIF entry in instance network info cache for port 71c1451f-de1d-4700-8326-282bc39bb6f7. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1422.114047] env[62627]: DEBUG nova.network.neutron [req-9237806f-d432-4ad7-b6d5-e621f4b23b72 req-7d522a48-802a-4e86-ae97-49a0508f87d1 service nova] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Updating instance_info_cache with network_info: [{"id": "71c1451f-de1d-4700-8326-282bc39bb6f7", "address": "fa:16:3e:fb:be:f1", "network": {"id": "e5d8177b-3afb-46cb-88d9-d4777ffa4a39", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1184692155-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.160", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f3a36c557145e38c528a628b1f3b67", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a44713-0af1-486e-bc0d-00e03a769fa4", "external-id": "nsx-vlan-transportzone-420", "segmentation_id": 420, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71c1451f-de", "ovs_interfaceid": "71c1451f-de1d-4700-8326-282bc39bb6f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1422.179309] env[62627]: DEBUG nova.compute.manager [None req-0ab48932-56c7-491b-8c4e-7e4f593c6f58 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 6a53e7a7-ec4b-4a97-98f7-8d686df3178c] Start destroying the instance on the hypervisor. 
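The destroy path that follows (Unregistering the VM, Deleting contents of the VM from datastore, DeleteDatastoreFile_Task) first drops the VM from the vCenter inventory and then deletes its directory on the datastore. A hedged sketch of that sequence; the session, managed-object references and datastore path below are placeholders, not values from this log.

def destroy_instance(session, vm_ref, file_manager, datacenter_ref, ds_path):
    # UnregisterVM removes the VM from the inventory without touching its disks.
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)
    # DeleteDatastoreFile_Task then removes the instance directory, e.g.
    # "[datastore2] <instance-uuid>", and is polled like any other task.
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              file_manager,
                              name=ds_path, datacenter=datacenter_ref)
    session.wait_for_task(task)
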
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1422.179539] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-0ab48932-56c7-491b-8c4e-7e4f593c6f58 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 6a53e7a7-ec4b-4a97-98f7-8d686df3178c] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1422.180513] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58da77b8-b6a1-4083-912a-637a961f1cdd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.188039] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-0ab48932-56c7-491b-8c4e-7e4f593c6f58 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 6a53e7a7-ec4b-4a97-98f7-8d686df3178c] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1422.188279] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3b8e4c2b-d824-4bc1-87c7-fbc512b9f279 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.247436] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-0ab48932-56c7-491b-8c4e-7e4f593c6f58 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 6a53e7a7-ec4b-4a97-98f7-8d686df3178c] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1422.247687] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-0ab48932-56c7-491b-8c4e-7e4f593c6f58 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 6a53e7a7-ec4b-4a97-98f7-8d686df3178c] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1422.247960] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ab48932-56c7-491b-8c4e-7e4f593c6f58 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Deleting the datastore file [datastore2] 6a53e7a7-ec4b-4a97-98f7-8d686df3178c {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1422.251274] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-74c394d6-cc07-4fb4-9a1d-a8bb351af1cf {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.253034] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Preparing fetch location {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1422.253267] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Fetch image to [datastore2] OSTACK_IMG_2fde281f-bdf9-41c2-bf7c-f7969a8b450e/OSTACK_IMG_2fde281f-bdf9-41c2-bf7c-f7969a8b450e.vmdk {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1422.253451] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None 
req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Downloading stream optimized image 96863610-a259-42f5-aff5-eef943337e28 to [datastore2] OSTACK_IMG_2fde281f-bdf9-41c2-bf7c-f7969a8b450e/OSTACK_IMG_2fde281f-bdf9-41c2-bf7c-f7969a8b450e.vmdk on the data store datastore2 as vApp {{(pid=62627) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1422.253619] env[62627]: DEBUG nova.virt.vmwareapi.images [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Downloading image file data 96863610-a259-42f5-aff5-eef943337e28 to the ESX as VM named 'OSTACK_IMG_2fde281f-bdf9-41c2-bf7c-f7969a8b450e' {{(pid=62627) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1422.260395] env[62627]: DEBUG oslo_vmware.api [None req-0ab48932-56c7-491b-8c4e-7e4f593c6f58 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1422.260395] env[62627]: value = "task-2195530" [ 1422.260395] env[62627]: _type = "Task" [ 1422.260395] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1422.267504] env[62627]: DEBUG oslo_vmware.api [None req-0ab48932-56c7-491b-8c4e-7e4f593c6f58 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195530, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.326141] env[62627]: DEBUG oslo_vmware.rw_handles [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1422.326141] env[62627]: value = "resgroup-9" [ 1422.326141] env[62627]: _type = "ResourcePool" [ 1422.326141] env[62627]: }. {{(pid=62627) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1422.326141] env[62627]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-7c066e7a-ac2b-49e9-875d-0850864593e7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.345362] env[62627]: DEBUG oslo_vmware.rw_handles [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lease: (returnval){ [ 1422.345362] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5201d37c-aa82-0dbf-f2fd-27d73db252f3" [ 1422.345362] env[62627]: _type = "HttpNfcLease" [ 1422.345362] env[62627]: } obtained for vApp import into resource pool (val){ [ 1422.345362] env[62627]: value = "resgroup-9" [ 1422.345362] env[62627]: _type = "ResourcePool" [ 1422.345362] env[62627]: }. 
{{(pid=62627) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1422.345731] env[62627]: DEBUG oslo_vmware.api [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the lease: (returnval){ [ 1422.345731] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5201d37c-aa82-0dbf-f2fd-27d73db252f3" [ 1422.345731] env[62627]: _type = "HttpNfcLease" [ 1422.345731] env[62627]: } to be ready. {{(pid=62627) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1422.351451] env[62627]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1422.351451] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5201d37c-aa82-0dbf-f2fd-27d73db252f3" [ 1422.351451] env[62627]: _type = "HttpNfcLease" [ 1422.351451] env[62627]: } is initializing. {{(pid=62627) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1422.616959] env[62627]: DEBUG oslo_concurrency.lockutils [req-9237806f-d432-4ad7-b6d5-e621f4b23b72 req-7d522a48-802a-4e86-ae97-49a0508f87d1 service nova] Releasing lock "refresh_cache-8c170084-a7f8-4774-8f9d-7874103b0f4f" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1422.617208] env[62627]: DEBUG nova.compute.manager [req-9237806f-d432-4ad7-b6d5-e621f4b23b72 req-7d522a48-802a-4e86-ae97-49a0508f87d1 service nova] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Received event network-vif-deleted-4cbd817b-c3ef-4a33-8648-4297830602e0 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1422.770797] env[62627]: DEBUG oslo_vmware.api [None req-0ab48932-56c7-491b-8c4e-7e4f593c6f58 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195530, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.853693] env[62627]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1422.853693] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5201d37c-aa82-0dbf-f2fd-27d73db252f3" [ 1422.853693] env[62627]: _type = "HttpNfcLease" [ 1422.853693] env[62627]: } is initializing. 
{{(pid=62627) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1422.972124] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquiring lock "cdff04c1-ec74-4afa-9f8c-7e1acbe9f336" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1422.972366] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lock "cdff04c1-ec74-4afa-9f8c-7e1acbe9f336" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1423.273306] env[62627]: DEBUG oslo_vmware.api [None req-0ab48932-56c7-491b-8c4e-7e4f593c6f58 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195530, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.789568} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1423.273539] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ab48932-56c7-491b-8c4e-7e4f593c6f58 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1423.273763] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-0ab48932-56c7-491b-8c4e-7e4f593c6f58 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 6a53e7a7-ec4b-4a97-98f7-8d686df3178c] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1423.273964] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-0ab48932-56c7-491b-8c4e-7e4f593c6f58 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 6a53e7a7-ec4b-4a97-98f7-8d686df3178c] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1423.274159] env[62627]: INFO nova.compute.manager [None req-0ab48932-56c7-491b-8c4e-7e4f593c6f58 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 6a53e7a7-ec4b-4a97-98f7-8d686df3178c] Took 1.09 seconds to destroy the instance on the hypervisor. [ 1423.274401] env[62627]: DEBUG oslo.service.loopingcall [None req-0ab48932-56c7-491b-8c4e-7e4f593c6f58 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
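The "Waiting for function ... _deallocate_network_with_retries to return" entry is oslo.service's looping-call helper: the deallocation is wrapped in a callable that the loop keeps invoking until it signals completion, which is how Nova retries the Neutron teardown. The log does not show which looping-call variant is in use here; the sketch below is the generic fixed-interval form with a placeholder completion check.

from oslo_service import loopingcall

def deallocation_finished():
    # Placeholder for the real check (e.g. "did the Neutron call succeed?").
    return True

def _poll():
    # Raising LoopingCallDone stops the loop and hands a value back to wait().
    if deallocation_finished():
        raise loopingcall.LoopingCallDone(True)

timer = loopingcall.FixedIntervalLoopingCall(_poll)
result = timer.start(interval=1.0).wait()
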
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1423.274590] env[62627]: DEBUG nova.compute.manager [-] [instance: 6a53e7a7-ec4b-4a97-98f7-8d686df3178c] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1423.274681] env[62627]: DEBUG nova.network.neutron [-] [instance: 6a53e7a7-ec4b-4a97-98f7-8d686df3178c] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1423.354421] env[62627]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1423.354421] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5201d37c-aa82-0dbf-f2fd-27d73db252f3" [ 1423.354421] env[62627]: _type = "HttpNfcLease" [ 1423.354421] env[62627]: } is ready. {{(pid=62627) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1423.354741] env[62627]: DEBUG oslo_vmware.rw_handles [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1423.354741] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5201d37c-aa82-0dbf-f2fd-27d73db252f3" [ 1423.354741] env[62627]: _type = "HttpNfcLease" [ 1423.354741] env[62627]: }. {{(pid=62627) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1423.355571] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b2830dc-1215-432b-9d15-a412a7b6b404 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.363782] env[62627]: DEBUG oslo_vmware.rw_handles [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/527620df-9030-fa0f-0782-53a35a08fa00/disk-0.vmdk from lease info. {{(pid=62627) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1423.363782] env[62627]: DEBUG oslo_vmware.rw_handles [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Creating HTTP connection to write to file with size = 31670784 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/527620df-9030-fa0f-0782-53a35a08fa00/disk-0.vmdk. {{(pid=62627) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1423.429266] env[62627]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-3d1523f7-2af2-4f2c-8ffb-c130e9533fb4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.474639] env[62627]: DEBUG nova.compute.manager [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Starting instance... 
{{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1423.572544] env[62627]: DEBUG nova.compute.manager [req-2ddf7313-be1b-4b29-bce7-0ce9611e859a req-76086712-dd2a-44e9-a79b-3771e3d84303 service nova] [instance: 6a53e7a7-ec4b-4a97-98f7-8d686df3178c] Received event network-vif-deleted-9fa1aa42-f4a9-48e3-946a-48622c860506 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1423.572756] env[62627]: INFO nova.compute.manager [req-2ddf7313-be1b-4b29-bce7-0ce9611e859a req-76086712-dd2a-44e9-a79b-3771e3d84303 service nova] [instance: 6a53e7a7-ec4b-4a97-98f7-8d686df3178c] Neutron deleted interface 9fa1aa42-f4a9-48e3-946a-48622c860506; detaching it from the instance and deleting it from the info cache [ 1423.573099] env[62627]: DEBUG nova.network.neutron [req-2ddf7313-be1b-4b29-bce7-0ce9611e859a req-76086712-dd2a-44e9-a79b-3771e3d84303 service nova] [instance: 6a53e7a7-ec4b-4a97-98f7-8d686df3178c] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1424.001134] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1424.001134] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1424.002099] env[62627]: INFO nova.compute.claims [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1424.054780] env[62627]: DEBUG nova.network.neutron [-] [instance: 6a53e7a7-ec4b-4a97-98f7-8d686df3178c] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1424.080288] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b7c2b651-fbe0-4ad2-bff8-47365c9accef {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.092405] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c9ae8b7-65b4-4aae-ad18-5cf83e0cd5c9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.126164] env[62627]: DEBUG nova.compute.manager [req-2ddf7313-be1b-4b29-bce7-0ce9611e859a req-76086712-dd2a-44e9-a79b-3771e3d84303 service nova] [instance: 6a53e7a7-ec4b-4a97-98f7-8d686df3178c] Detach interface failed, port_id=9fa1aa42-f4a9-48e3-946a-48622c860506, reason: Instance 6a53e7a7-ec4b-4a97-98f7-8d686df3178c could not be found. 
{{(pid=62627) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1424.557887] env[62627]: INFO nova.compute.manager [-] [instance: 6a53e7a7-ec4b-4a97-98f7-8d686df3178c] Took 1.28 seconds to deallocate network for instance. [ 1424.867679] env[62627]: DEBUG oslo_vmware.rw_handles [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Completed reading data from the image iterator. {{(pid=62627) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1424.868106] env[62627]: DEBUG oslo_vmware.rw_handles [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/527620df-9030-fa0f-0782-53a35a08fa00/disk-0.vmdk. {{(pid=62627) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1424.869190] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-909213a6-1c23-451a-bc7e-3eaf2ad95848 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.876689] env[62627]: DEBUG oslo_vmware.rw_handles [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/527620df-9030-fa0f-0782-53a35a08fa00/disk-0.vmdk is in state: ready. {{(pid=62627) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1424.876850] env[62627]: DEBUG oslo_vmware.rw_handles [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/527620df-9030-fa0f-0782-53a35a08fa00/disk-0.vmdk. {{(pid=62627) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1424.877106] env[62627]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-bf7196aa-5523-4a13-be6d-e093728cb508 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.067835] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0ab48932-56c7-491b-8c4e-7e4f593c6f58 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1425.092147] env[62627]: DEBUG oslo_vmware.rw_handles [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/527620df-9030-fa0f-0782-53a35a08fa00/disk-0.vmdk. 
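The shelved-image restore above streams the image to the ESX host through an HttpNfcLease: ImportVApp on the resource pool returns a lease, the lease info exposes the disk-0.vmdk upload URL, the stream-optimized VMDK is written to that URL while HttpNfcLeaseProgress is reported, and the lease is completed once the writer closes. A hedged sketch of that lifecycle; the references and import spec are placeholders and the actual HTTP streaming step is elided.

from oslo_vmware import vim_util

def import_vapp(session, rp_ref, folder_ref, import_spec):
    # ImportVApp returns an HttpNfcLease; it turns "ready" once vCenter has
    # prepared the upload URLs (the "Lease ... is initializing/ready" lines).
    lease = session.invoke_api(session.vim, 'ImportVApp', rp_ref,
                               spec=import_spec, folder=folder_ref)
    session.wait_for_lease_ready(lease)
    # The lease info carries the device URL for disk-0.vmdk; the caller
    # streams the image bytes there, then signals completion.
    lease_info = session.invoke_api(vim_util, 'get_object_property',
                                    session.vim, lease, 'info')
    # ... HTTP PUT of the stream-optimized VMDK to the URL in lease_info ...
    session.invoke_api(session.vim, 'HttpNfcLeaseComplete', lease)
    return lease_info
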
{{(pid=62627) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1425.092147] env[62627]: INFO nova.virt.vmwareapi.images [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Downloaded image file data 96863610-a259-42f5-aff5-eef943337e28 [ 1425.092147] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9572c56-8516-4104-9c1d-d6d545a62a2c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.112336] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cfff3e2d-b24c-40c9-ab5b-c29f960fa94c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.114600] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aee7822c-444b-4a33-b472-58a5002f9ebb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.123021] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5395334-ba88-4a78-a294-e456bc780686 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.153933] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77cdbb45-8a8e-4bdb-85e3-a37b2b7bd108 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.156544] env[62627]: INFO nova.virt.vmwareapi.images [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] The imported VM was unregistered [ 1425.159140] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Caching image {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1425.159370] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Creating directory with path [datastore2] devstack-image-cache_base/96863610-a259-42f5-aff5-eef943337e28 {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1425.159626] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fc466fef-8222-4a26-b6ed-8b76db9c6f05 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.166264] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f26300cf-0a01-4e7e-9400-43e62a348977 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.171797] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] 
Created directory with path [datastore2] devstack-image-cache_base/96863610-a259-42f5-aff5-eef943337e28 {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1425.172130] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_2fde281f-bdf9-41c2-bf7c-f7969a8b450e/OSTACK_IMG_2fde281f-bdf9-41c2-bf7c-f7969a8b450e.vmdk to [datastore2] devstack-image-cache_base/96863610-a259-42f5-aff5-eef943337e28/96863610-a259-42f5-aff5-eef943337e28.vmdk. {{(pid=62627) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1425.179645] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-40956a7d-8184-4ac9-916a-d54de23e9492 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.182056] env[62627]: DEBUG nova.compute.provider_tree [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1425.187953] env[62627]: DEBUG oslo_vmware.api [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1425.187953] env[62627]: value = "task-2195533" [ 1425.187953] env[62627]: _type = "Task" [ 1425.187953] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1425.196292] env[62627]: DEBUG oslo_vmware.api [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195533, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.685297] env[62627]: DEBUG nova.scheduler.client.report [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1425.699533] env[62627]: DEBUG oslo_vmware.api [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195533, 'name': MoveVirtualDisk_Task} progress is 21%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.195547] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.195s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1426.196079] env[62627]: DEBUG nova.compute.manager [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1426.202343] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0ab48932-56c7-491b-8c4e-7e4f593c6f58 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.135s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1426.202582] env[62627]: DEBUG nova.objects.instance [None req-0ab48932-56c7-491b-8c4e-7e4f593c6f58 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lazy-loading 'resources' on Instance uuid 6a53e7a7-ec4b-4a97-98f7-8d686df3178c {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1426.203652] env[62627]: DEBUG oslo_vmware.api [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195533, 'name': MoveVirtualDisk_Task} progress is 46%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.700019] env[62627]: DEBUG oslo_vmware.api [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195533, 'name': MoveVirtualDisk_Task} progress is 69%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.705817] env[62627]: DEBUG nova.compute.utils [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1426.711060] env[62627]: DEBUG nova.compute.manager [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Allocating IP information in the background. 
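The "compute_resources" lock lines just above (released after being held 2.195s, acquired after waiting 1.135s) come from oslo.concurrency's lockutils wrapper, which logs who requested each lock along with the wait and hold times. A minimal sketch of the two usual forms, with placeholder lock names.

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def update_usage():
    # Runs with the named semaphore held; lockutils emits the
    # acquired/released DEBUG lines with wait and hold times.
    pass

# Equivalent context-manager form, e.g. around a network-info cache refresh:
with lockutils.lock('refresh_cache-<instance-uuid>'):
    pass
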
{{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1426.711060] env[62627]: DEBUG nova.network.neutron [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1426.772729] env[62627]: DEBUG nova.policy [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e72f59cc4a02460088e86ad29af1e0ff', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '97c11d791dcf4f94a8e8bda7a6f014cd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 1426.801614] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1be81aa-2b85-44ea-9657-02ca7e8552de {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.809079] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-769471b3-c522-42b6-832b-d6e50ab631cd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.842101] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45f05c86-7025-4c31-8389-04a6311a4faa {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.850922] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a9468ef-cfbc-4442-8831-c9a7cd9931db {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.865650] env[62627]: DEBUG nova.compute.provider_tree [None req-0ab48932-56c7-491b-8c4e-7e4f593c6f58 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1427.094791] env[62627]: DEBUG nova.network.neutron [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Successfully created port: 238d30ee-1280-409d-89ff-3c112db3c2d4 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1427.203037] env[62627]: DEBUG oslo_vmware.api [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195533, 'name': MoveVirtualDisk_Task} progress is 91%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1427.210779] env[62627]: DEBUG nova.compute.manager [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1427.369063] env[62627]: DEBUG nova.scheduler.client.report [None req-0ab48932-56c7-491b-8c4e-7e4f593c6f58 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1427.703223] env[62627]: DEBUG oslo_vmware.api [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195533, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.271423} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1427.703472] env[62627]: INFO nova.virt.vmwareapi.ds_util [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_2fde281f-bdf9-41c2-bf7c-f7969a8b450e/OSTACK_IMG_2fde281f-bdf9-41c2-bf7c-f7969a8b450e.vmdk to [datastore2] devstack-image-cache_base/96863610-a259-42f5-aff5-eef943337e28/96863610-a259-42f5-aff5-eef943337e28.vmdk. [ 1427.703669] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Cleaning up location [datastore2] OSTACK_IMG_2fde281f-bdf9-41c2-bf7c-f7969a8b450e {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1427.703841] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_2fde281f-bdf9-41c2-bf7c-f7969a8b450e {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1427.704193] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dcacb551-2a05-4a70-a1d8-81f7ef544ade {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.711542] env[62627]: DEBUG oslo_vmware.api [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1427.711542] env[62627]: value = "task-2195534" [ 1427.711542] env[62627]: _type = "Task" [ 1427.711542] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1427.724304] env[62627]: DEBUG oslo_vmware.api [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195534, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1427.873571] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0ab48932-56c7-491b-8c4e-7e4f593c6f58 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.671s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1427.899087] env[62627]: INFO nova.scheduler.client.report [None req-0ab48932-56c7-491b-8c4e-7e4f593c6f58 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Deleted allocations for instance 6a53e7a7-ec4b-4a97-98f7-8d686df3178c [ 1428.223766] env[62627]: DEBUG nova.compute.manager [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Start spawning the instance on the hypervisor. {{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1428.224921] env[62627]: DEBUG oslo_vmware.api [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195534, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.036131} completed successfully. 
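The lockutils lines in this stretch follow a fixed pattern: a lock is acquired with the time spent waiting reported, then released with the time it was held (here "compute_resources" was held 1.671s by the resource tracker). A rough illustration of how those waited/held figures could be measured around a critical section, using plain threading rather than oslo.concurrency:

    import threading
    import time

    _lock = threading.Lock()

    def with_timed_lock(name, func):
        # Mirrors the "waited N s" / "held N s" bookkeeping seen in the log;
        # the helper itself is illustrative, not oslo.concurrency code.
        t0 = time.monotonic()
        with _lock:
            waited = time.monotonic() - t0
            print(f'Lock "{name}" acquired :: waited {waited:.3f}s')
            t1 = time.monotonic()
            try:
                return func()
            finally:
                held = time.monotonic() - t1
                print(f'Lock "{name}" released :: held {held:.3f}s')

    with_timed_lock("compute_resources", lambda: time.sleep(0.1))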
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1428.225355] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1428.225532] env[62627]: DEBUG oslo_concurrency.lockutils [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Releasing lock "[datastore2] devstack-image-cache_base/96863610-a259-42f5-aff5-eef943337e28/96863610-a259-42f5-aff5-eef943337e28.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1428.225766] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/96863610-a259-42f5-aff5-eef943337e28/96863610-a259-42f5-aff5-eef943337e28.vmdk to [datastore2] 8c170084-a7f8-4774-8f9d-7874103b0f4f/8c170084-a7f8-4774-8f9d-7874103b0f4f.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1428.226249] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bc0b7b00-408d-4002-8fed-b56b577b69a8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.233125] env[62627]: DEBUG oslo_vmware.api [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1428.233125] env[62627]: value = "task-2195535" [ 1428.233125] env[62627]: _type = "Task" [ 1428.233125] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1428.240816] env[62627]: DEBUG oslo_vmware.api [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195535, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1428.249893] env[62627]: DEBUG nova.virt.hardware [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1428.250189] env[62627]: DEBUG nova.virt.hardware [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1428.250367] env[62627]: DEBUG nova.virt.hardware [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1428.250551] env[62627]: DEBUG nova.virt.hardware [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1428.250709] env[62627]: DEBUG nova.virt.hardware [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1428.250856] env[62627]: DEBUG nova.virt.hardware [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1428.251075] env[62627]: DEBUG nova.virt.hardware [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1428.251241] env[62627]: DEBUG nova.virt.hardware [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1428.251408] env[62627]: DEBUG 
nova.virt.hardware [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1428.251571] env[62627]: DEBUG nova.virt.hardware [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1428.251744] env[62627]: DEBUG nova.virt.hardware [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1428.252546] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b80a0e1-47ef-4ed9-b70d-b8457e58b412 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.260185] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4034b270-8d01-4d5a-b63f-a2311e31004c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.407390] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0ab48932-56c7-491b-8c4e-7e4f593c6f58 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "6a53e7a7-ec4b-4a97-98f7-8d686df3178c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.735s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1428.519368] env[62627]: DEBUG nova.compute.manager [req-21c17654-31ed-4c65-8e74-8793d115209f req-f87ef381-d3f9-4a39-96ed-cb4103d05496 service nova] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Received event network-vif-plugged-238d30ee-1280-409d-89ff-3c112db3c2d4 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1428.519614] env[62627]: DEBUG oslo_concurrency.lockutils [req-21c17654-31ed-4c65-8e74-8793d115209f req-f87ef381-d3f9-4a39-96ed-cb4103d05496 service nova] Acquiring lock "cdff04c1-ec74-4afa-9f8c-7e1acbe9f336-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1428.519803] env[62627]: DEBUG oslo_concurrency.lockutils [req-21c17654-31ed-4c65-8e74-8793d115209f req-f87ef381-d3f9-4a39-96ed-cb4103d05496 service nova] Lock "cdff04c1-ec74-4afa-9f8c-7e1acbe9f336-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1428.520037] env[62627]: DEBUG oslo_concurrency.lockutils [req-21c17654-31ed-4c65-8e74-8793d115209f req-f87ef381-d3f9-4a39-96ed-cb4103d05496 service nova] Lock "cdff04c1-ec74-4afa-9f8c-7e1acbe9f336-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1428.520205] env[62627]: DEBUG nova.compute.manager [req-21c17654-31ed-4c65-8e74-8793d115209f req-f87ef381-d3f9-4a39-96ed-cb4103d05496 service nova] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] No waiting events found dispatching network-vif-plugged-238d30ee-1280-409d-89ff-3c112db3c2d4 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1428.520376] env[62627]: WARNING nova.compute.manager [req-21c17654-31ed-4c65-8e74-8793d115209f req-f87ef381-d3f9-4a39-96ed-cb4103d05496 service nova] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Received unexpected event network-vif-plugged-238d30ee-1280-409d-89ff-3c112db3c2d4 for instance with vm_state building and task_state spawning. [ 1428.617265] env[62627]: DEBUG nova.network.neutron [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Successfully updated port: 238d30ee-1280-409d-89ff-3c112db3c2d4 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1428.743345] env[62627]: DEBUG oslo_vmware.api [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195535, 'name': CopyVirtualDisk_Task} progress is 24%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1429.118917] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquiring lock "refresh_cache-cdff04c1-ec74-4afa-9f8c-7e1acbe9f336" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1429.119091] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquired lock "refresh_cache-cdff04c1-ec74-4afa-9f8c-7e1acbe9f336" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1429.119391] env[62627]: DEBUG nova.network.neutron [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1429.245598] env[62627]: DEBUG oslo_vmware.api [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195535, 'name': CopyVirtualDisk_Task} progress is 43%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1429.655755] env[62627]: DEBUG nova.network.neutron [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Instance cache missing network info. 
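The CPU topology negotiation logged a few entries back amounts to enumerating every sockets x cores x threads factorization of the flavor's vCPU count that fits the limits (65536 each here); for the 1-vCPU m1.nano flavor the only candidate is 1:1:1, which is why a single possible topology is reported. A toy enumeration in the same spirit, not Nova's actual implementation:

    import itertools

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        # Every (sockets, cores, threads) triple whose product equals the vCPU
        # count and that stays within the limits; the m1.nano case yields (1, 1, 1).
        return [
            (s, c, t)
            for s, c, t in itertools.product(range(1, vcpus + 1), repeat=3)
            if s * c * t == vcpus and s <= max_sockets and c <= max_cores and t <= max_threads
        ]

    print(possible_topologies(1))  # [(1, 1, 1)]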
{{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1429.745543] env[62627]: DEBUG oslo_vmware.api [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195535, 'name': CopyVirtualDisk_Task} progress is 69%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1429.751147] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a077a8e6-afd7-4458-a874-54f1a8396027 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquiring lock "00bb8c04-3c0b-48ff-8127-13d3e15cb86c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1429.751554] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a077a8e6-afd7-4458-a874-54f1a8396027 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "00bb8c04-3c0b-48ff-8127-13d3e15cb86c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1429.751757] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a077a8e6-afd7-4458-a874-54f1a8396027 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquiring lock "00bb8c04-3c0b-48ff-8127-13d3e15cb86c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1429.751938] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a077a8e6-afd7-4458-a874-54f1a8396027 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "00bb8c04-3c0b-48ff-8127-13d3e15cb86c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1429.752121] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a077a8e6-afd7-4458-a874-54f1a8396027 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "00bb8c04-3c0b-48ff-8127-13d3e15cb86c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1429.754221] env[62627]: INFO nova.compute.manager [None req-a077a8e6-afd7-4458-a874-54f1a8396027 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 00bb8c04-3c0b-48ff-8127-13d3e15cb86c] Terminating instance [ 1429.796660] env[62627]: DEBUG nova.network.neutron [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Updating instance_info_cache with network_info: [{"id": "238d30ee-1280-409d-89ff-3c112db3c2d4", "address": "fa:16:3e:51:1b:2f", "network": {"id": "da26ed97-a6a7-47f1-b6c0-d1b2f45ad202", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-265963183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", 
"type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97c11d791dcf4f94a8e8bda7a6f014cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6eaa481-1f92-4851-b98e-09ed0daad7cb", "external-id": "nsx-vlan-transportzone-636", "segmentation_id": 636, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap238d30ee-12", "ovs_interfaceid": "238d30ee-1280-409d-89ff-3c112db3c2d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1430.246187] env[62627]: DEBUG oslo_vmware.api [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195535, 'name': CopyVirtualDisk_Task} progress is 91%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.258085] env[62627]: DEBUG nova.compute.manager [None req-a077a8e6-afd7-4458-a874-54f1a8396027 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 00bb8c04-3c0b-48ff-8127-13d3e15cb86c] Start destroying the instance on the hypervisor. {{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1430.258316] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-a077a8e6-afd7-4458-a874-54f1a8396027 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 00bb8c04-3c0b-48ff-8127-13d3e15cb86c] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1430.259241] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b805fb90-6fba-4cc6-85c3-d7c1f0ebf27e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.266968] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-a077a8e6-afd7-4458-a874-54f1a8396027 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 00bb8c04-3c0b-48ff-8127-13d3e15cb86c] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1430.267265] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0d1985f1-0842-4417-9fda-4d103a39ac5e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.275403] env[62627]: DEBUG oslo_vmware.api [None req-a077a8e6-afd7-4458-a874-54f1a8396027 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1430.275403] env[62627]: value = "task-2195536" [ 1430.275403] env[62627]: _type = "Task" [ 1430.275403] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1430.284775] env[62627]: DEBUG oslo_vmware.api [None req-a077a8e6-afd7-4458-a874-54f1a8396027 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195536, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.299644] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Releasing lock "refresh_cache-cdff04c1-ec74-4afa-9f8c-7e1acbe9f336" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1430.300070] env[62627]: DEBUG nova.compute.manager [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Instance network_info: |[{"id": "238d30ee-1280-409d-89ff-3c112db3c2d4", "address": "fa:16:3e:51:1b:2f", "network": {"id": "da26ed97-a6a7-47f1-b6c0-d1b2f45ad202", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-265963183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97c11d791dcf4f94a8e8bda7a6f014cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6eaa481-1f92-4851-b98e-09ed0daad7cb", "external-id": "nsx-vlan-transportzone-636", "segmentation_id": 636, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap238d30ee-12", "ovs_interfaceid": "238d30ee-1280-409d-89ff-3c112db3c2d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1430.300570] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:51:1b:2f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c6eaa481-1f92-4851-b98e-09ed0daad7cb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '238d30ee-1280-409d-89ff-3c112db3c2d4', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1430.308672] env[62627]: DEBUG oslo.service.loopingcall [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
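The "Instance VIF info" entry above is essentially a projection of the Neutron port data gathered earlier: the port's MAC and id are carried over, the nsx-logical-switch-id from the port details becomes an OpaqueNetwork reference, and the image's vmxnet3 NIC model is attached. A small sketch of that mapping using only values visible in the log; the function name and input shape are made up for illustration:

    def port_to_vif_info(port):
        # Illustrative mapping; field names follow the log output, not a Nova API.
        return {
            "network_name": port["network"]["bridge"],   # 'br-int'
            "mac_address": port["address"],              # 'fa:16:3e:51:1b:2f'
            "network_ref": {
                "type": "OpaqueNetwork",
                "network-id": port["details"]["nsx-logical-switch-id"],
                "network-type": "nsx.LogicalSwitch",
                "use-external-id": True,
            },
            "iface_id": port["id"],                      # port UUID
            "vif_model": "vmxnet3",
        }

    vif = port_to_vif_info({
        "id": "238d30ee-1280-409d-89ff-3c112db3c2d4",
        "address": "fa:16:3e:51:1b:2f",
        "network": {"bridge": "br-int"},
        "details": {"nsx-logical-switch-id": "c6eaa481-1f92-4851-b98e-09ed0daad7cb"},
    })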
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1430.308987] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1430.309248] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f15b4e35-54f4-42a4-8ba9-61a3fa33733d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.330867] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1430.330867] env[62627]: value = "task-2195537" [ 1430.330867] env[62627]: _type = "Task" [ 1430.330867] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1430.338716] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195537, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.547143] env[62627]: DEBUG nova.compute.manager [req-dfda5901-fc2c-48a5-b04c-8253f745ae50 req-9d9a07ef-12ee-44fe-96e1-7ccc3b941415 service nova] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Received event network-changed-238d30ee-1280-409d-89ff-3c112db3c2d4 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1430.547143] env[62627]: DEBUG nova.compute.manager [req-dfda5901-fc2c-48a5-b04c-8253f745ae50 req-9d9a07ef-12ee-44fe-96e1-7ccc3b941415 service nova] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Refreshing instance network info cache due to event network-changed-238d30ee-1280-409d-89ff-3c112db3c2d4. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1430.547322] env[62627]: DEBUG oslo_concurrency.lockutils [req-dfda5901-fc2c-48a5-b04c-8253f745ae50 req-9d9a07ef-12ee-44fe-96e1-7ccc3b941415 service nova] Acquiring lock "refresh_cache-cdff04c1-ec74-4afa-9f8c-7e1acbe9f336" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1430.547518] env[62627]: DEBUG oslo_concurrency.lockutils [req-dfda5901-fc2c-48a5-b04c-8253f745ae50 req-9d9a07ef-12ee-44fe-96e1-7ccc3b941415 service nova] Acquired lock "refresh_cache-cdff04c1-ec74-4afa-9f8c-7e1acbe9f336" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1430.547777] env[62627]: DEBUG nova.network.neutron [req-dfda5901-fc2c-48a5-b04c-8253f745ae50 req-9d9a07ef-12ee-44fe-96e1-7ccc3b941415 service nova] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Refreshing network info cache for port 238d30ee-1280-409d-89ff-3c112db3c2d4 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1430.745491] env[62627]: DEBUG oslo_vmware.api [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195535, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.214391} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1430.745742] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/96863610-a259-42f5-aff5-eef943337e28/96863610-a259-42f5-aff5-eef943337e28.vmdk to [datastore2] 8c170084-a7f8-4774-8f9d-7874103b0f4f/8c170084-a7f8-4774-8f9d-7874103b0f4f.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1430.746561] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c2a817d-a316-4168-a015-f72727840c8c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.768337] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Reconfiguring VM instance instance-00000071 to attach disk [datastore2] 8c170084-a7f8-4774-8f9d-7874103b0f4f/8c170084-a7f8-4774-8f9d-7874103b0f4f.vmdk or device None with type streamOptimized {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1430.768565] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-82680147-0b6e-4972-b5f0-fb50d2804c3e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.789246] env[62627]: DEBUG oslo_vmware.api [None req-a077a8e6-afd7-4458-a874-54f1a8396027 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195536, 'name': PowerOffVM_Task, 'duration_secs': 0.267006} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1430.790300] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-a077a8e6-afd7-4458-a874-54f1a8396027 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 00bb8c04-3c0b-48ff-8127-13d3e15cb86c] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1430.790471] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-a077a8e6-afd7-4458-a874-54f1a8396027 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 00bb8c04-3c0b-48ff-8127-13d3e15cb86c] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1430.790749] env[62627]: DEBUG oslo_vmware.api [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1430.790749] env[62627]: value = "task-2195538" [ 1430.790749] env[62627]: _type = "Task" [ 1430.790749] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1430.790921] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-669a167b-19fe-4768-8469-ad6048e867e1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.799228] env[62627]: DEBUG oslo_vmware.api [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195538, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.839977] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195537, 'name': CreateVM_Task, 'duration_secs': 0.405125} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1430.840232] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1430.840869] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1430.841063] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1430.841440] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1430.841717] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1e706731-0fc8-41a4-9961-fe1c946cc812 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.846081] env[62627]: DEBUG oslo_vmware.api [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 1430.846081] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]522a1df1-9d6f-d3ac-bdd5-b4da8b38d972" [ 1430.846081] env[62627]: _type = "Task" [ 1430.846081] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1430.849833] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-a077a8e6-afd7-4458-a874-54f1a8396027 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 00bb8c04-3c0b-48ff-8127-13d3e15cb86c] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1430.850046] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-a077a8e6-afd7-4458-a874-54f1a8396027 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 00bb8c04-3c0b-48ff-8127-13d3e15cb86c] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1430.850241] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-a077a8e6-afd7-4458-a874-54f1a8396027 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Deleting the datastore file [datastore2] 00bb8c04-3c0b-48ff-8127-13d3e15cb86c {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1430.850822] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cd256063-39aa-45b8-ae54-631de35d4412 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.855525] env[62627]: DEBUG oslo_vmware.api [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]522a1df1-9d6f-d3ac-bdd5-b4da8b38d972, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.859385] env[62627]: DEBUG oslo_vmware.api [None req-a077a8e6-afd7-4458-a874-54f1a8396027 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for the task: (returnval){ [ 1430.859385] env[62627]: value = "task-2195540" [ 1430.859385] env[62627]: _type = "Task" [ 1430.859385] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1430.866242] env[62627]: DEBUG oslo_vmware.api [None req-a077a8e6-afd7-4458-a874-54f1a8396027 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195540, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.236221] env[62627]: DEBUG nova.network.neutron [req-dfda5901-fc2c-48a5-b04c-8253f745ae50 req-9d9a07ef-12ee-44fe-96e1-7ccc3b941415 service nova] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Updated VIF entry in instance network info cache for port 238d30ee-1280-409d-89ff-3c112db3c2d4. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1431.236609] env[62627]: DEBUG nova.network.neutron [req-dfda5901-fc2c-48a5-b04c-8253f745ae50 req-9d9a07ef-12ee-44fe-96e1-7ccc3b941415 service nova] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Updating instance_info_cache with network_info: [{"id": "238d30ee-1280-409d-89ff-3c112db3c2d4", "address": "fa:16:3e:51:1b:2f", "network": {"id": "da26ed97-a6a7-47f1-b6c0-d1b2f45ad202", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-265963183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97c11d791dcf4f94a8e8bda7a6f014cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6eaa481-1f92-4851-b98e-09ed0daad7cb", "external-id": "nsx-vlan-transportzone-636", "segmentation_id": 636, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap238d30ee-12", "ovs_interfaceid": "238d30ee-1280-409d-89ff-3c112db3c2d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1431.303714] env[62627]: DEBUG oslo_vmware.api [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195538, 'name': ReconfigVM_Task, 'duration_secs': 0.283771} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1431.304147] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Reconfigured VM instance instance-00000071 to attach disk [datastore2] 8c170084-a7f8-4774-8f9d-7874103b0f4f/8c170084-a7f8-4774-8f9d-7874103b0f4f.vmdk or device None with type streamOptimized {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1431.304723] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-36e83339-c68d-4440-aaf6-02e05be94f86 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.310891] env[62627]: DEBUG oslo_vmware.api [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1431.310891] env[62627]: value = "task-2195541" [ 1431.310891] env[62627]: _type = "Task" [ 1431.310891] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1431.318231] env[62627]: DEBUG oslo_vmware.api [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195541, 'name': Rename_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.355191] env[62627]: DEBUG oslo_vmware.api [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]522a1df1-9d6f-d3ac-bdd5-b4da8b38d972, 'name': SearchDatastore_Task, 'duration_secs': 0.008611} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1431.355484] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1431.355733] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1431.355953] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1431.356123] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1431.356321] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1431.356563] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fd7a1cd5-a647-4138-881d-787ea15d3c21 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.364229] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1431.364399] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1431.367599] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0c09982d-66ef-426f-9bf7-7966ab037044 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.369589] env[62627]: DEBUG oslo_vmware.api [None req-a077a8e6-afd7-4458-a874-54f1a8396027 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Task: {'id': task-2195540, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.125106} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1431.369813] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-a077a8e6-afd7-4458-a874-54f1a8396027 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1431.369989] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-a077a8e6-afd7-4458-a874-54f1a8396027 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 00bb8c04-3c0b-48ff-8127-13d3e15cb86c] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1431.370213] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-a077a8e6-afd7-4458-a874-54f1a8396027 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 00bb8c04-3c0b-48ff-8127-13d3e15cb86c] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1431.370388] env[62627]: INFO nova.compute.manager [None req-a077a8e6-afd7-4458-a874-54f1a8396027 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] [instance: 00bb8c04-3c0b-48ff-8127-13d3e15cb86c] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1431.370613] env[62627]: DEBUG oslo.service.loopingcall [None req-a077a8e6-afd7-4458-a874-54f1a8396027 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1431.371116] env[62627]: DEBUG nova.compute.manager [-] [instance: 00bb8c04-3c0b-48ff-8127-13d3e15cb86c] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1431.371237] env[62627]: DEBUG nova.network.neutron [-] [instance: 00bb8c04-3c0b-48ff-8127-13d3e15cb86c] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1431.373667] env[62627]: DEBUG oslo_vmware.api [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 1431.373667] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]520c777e-5b31-ce07-bc52-622d7cd5229e" [ 1431.373667] env[62627]: _type = "Task" [ 1431.373667] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1431.381346] env[62627]: DEBUG oslo_vmware.api [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]520c777e-5b31-ce07-bc52-622d7cd5229e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.742065] env[62627]: DEBUG oslo_concurrency.lockutils [req-dfda5901-fc2c-48a5-b04c-8253f745ae50 req-9d9a07ef-12ee-44fe-96e1-7ccc3b941415 service nova] Releasing lock "refresh_cache-cdff04c1-ec74-4afa-9f8c-7e1acbe9f336" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1431.821093] env[62627]: DEBUG oslo_vmware.api [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195541, 'name': Rename_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.883744] env[62627]: DEBUG oslo_vmware.api [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]520c777e-5b31-ce07-bc52-622d7cd5229e, 'name': SearchDatastore_Task, 'duration_secs': 0.011721} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1431.884539] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ef3c12d-29d1-4604-8da0-e6f27b69bcbd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.889728] env[62627]: DEBUG oslo_vmware.api [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 1431.889728] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5280ab70-2059-d189-f5f6-0247bee6681a" [ 1431.889728] env[62627]: _type = "Task" [ 1431.889728] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1431.898388] env[62627]: DEBUG oslo_vmware.api [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5280ab70-2059-d189-f5f6-0247bee6681a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1432.071679] env[62627]: DEBUG nova.network.neutron [-] [instance: 00bb8c04-3c0b-48ff-8127-13d3e15cb86c] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1432.321132] env[62627]: DEBUG oslo_vmware.api [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195541, 'name': Rename_Task, 'duration_secs': 0.561665} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1432.321398] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1432.321629] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1afbf445-16a4-4f53-be1d-a6dd63ca5614 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.327685] env[62627]: DEBUG oslo_vmware.api [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1432.327685] env[62627]: value = "task-2195542" [ 1432.327685] env[62627]: _type = "Task" [ 1432.327685] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1432.334881] env[62627]: DEBUG oslo_vmware.api [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195542, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1432.399395] env[62627]: DEBUG oslo_vmware.api [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5280ab70-2059-d189-f5f6-0247bee6681a, 'name': SearchDatastore_Task, 'duration_secs': 0.009096} completed successfully. 
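Every vCenter operation in this log follows the same wait_for_task shape: invoke the call, receive a task handle, then poll it until it reports completed successfully (the "progress is 0/14/24/43/69/89/91%" lines are those polls). A generic polling loop in the same spirit; the poll callable and its return format are assumptions made for illustration, not the oslo.vmware API:

    import time

    def wait_for_task(poll, interval=0.5, timeout=300):
        # poll() stands in for one property-retrieval round trip against the
        # task object; it is assumed to return a dict such as
        # {"state": "running", "progress": 43} or {"state": "success"}.
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = poll()
            if info["state"] == "success":
                return info
            if info["state"] == "error":
                raise RuntimeError("task failed")
            print(f"progress is {info.get('progress', 0)}%")
            time.sleep(interval)
        raise TimeoutError("task did not complete in time")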
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1432.399644] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1432.399896] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] cdff04c1-ec74-4afa-9f8c-7e1acbe9f336/cdff04c1-ec74-4afa-9f8c-7e1acbe9f336.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1432.400181] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-522f27fd-55b1-4100-9b8f-ccaa0e799506 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.406016] env[62627]: DEBUG oslo_vmware.api [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 1432.406016] env[62627]: value = "task-2195543" [ 1432.406016] env[62627]: _type = "Task" [ 1432.406016] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1432.413520] env[62627]: DEBUG oslo_vmware.api [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195543, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1432.574260] env[62627]: INFO nova.compute.manager [-] [instance: 00bb8c04-3c0b-48ff-8127-13d3e15cb86c] Took 1.20 seconds to deallocate network for instance. [ 1432.575436] env[62627]: DEBUG nova.compute.manager [req-01ef2cad-33ce-4c9a-a491-cb48f4398690 req-ec18b0fd-ec15-4e54-adf7-4072c7e00efb service nova] [instance: 00bb8c04-3c0b-48ff-8127-13d3e15cb86c] Received event network-vif-deleted-d1a62857-50ae-4f10-8206-bfd58d9da6e0 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1432.841639] env[62627]: DEBUG oslo_vmware.api [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195542, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1432.916539] env[62627]: DEBUG oslo_vmware.api [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195543, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.44804} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1432.916844] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] cdff04c1-ec74-4afa-9f8c-7e1acbe9f336/cdff04c1-ec74-4afa-9f8c-7e1acbe9f336.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1432.917021] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1432.917279] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-695a9bb5-fd85-41d3-aecf-29eb8c92e78a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.923445] env[62627]: DEBUG oslo_vmware.api [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 1432.923445] env[62627]: value = "task-2195544" [ 1432.923445] env[62627]: _type = "Task" [ 1432.923445] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1432.930912] env[62627]: DEBUG oslo_vmware.api [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195544, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1433.082459] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a077a8e6-afd7-4458-a874-54f1a8396027 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1433.082750] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a077a8e6-afd7-4458-a874-54f1a8396027 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1433.082971] env[62627]: DEBUG nova.objects.instance [None req-a077a8e6-afd7-4458-a874-54f1a8396027 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lazy-loading 'resources' on Instance uuid 00bb8c04-3c0b-48ff-8127-13d3e15cb86c {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1433.338409] env[62627]: DEBUG oslo_vmware.api [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195542, 'name': PowerOnVM_Task, 'duration_secs': 0.58179} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1433.338861] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1433.433474] env[62627]: DEBUG oslo_vmware.api [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195544, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.117819} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1433.433762] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1433.434471] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e8ea8e9-5865-4b2f-a5f2-1be41744a878 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.457919] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Reconfiguring VM instance instance-00000078 to attach disk [datastore2] cdff04c1-ec74-4afa-9f8c-7e1acbe9f336/cdff04c1-ec74-4afa-9f8c-7e1acbe9f336.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1433.458914] env[62627]: DEBUG nova.compute.manager [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1433.459182] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-63298ed0-3beb-4b86-88b5-02ee557bf8d4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.473281] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09d5dbde-baa6-4322-a433-ebd7545407d6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.483890] env[62627]: DEBUG oslo_vmware.api [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 1433.483890] env[62627]: value = "task-2195545" [ 1433.483890] env[62627]: _type = "Task" [ 1433.483890] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1433.491934] env[62627]: DEBUG oslo_vmware.api [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195545, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1433.650123] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44337dee-4ce8-459f-8798-1c5e73b8c726 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.658194] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb1ad0bf-e126-4815-9022-31da8bbf60c2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.688437] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be32c2bb-e978-49b4-95b8-194588c1d6ed {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.695538] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-830da95e-b25f-4141-b8b2-3da0b4fbd3f5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.708088] env[62627]: DEBUG nova.compute.provider_tree [None req-a077a8e6-afd7-4458-a874-54f1a8396027 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1433.994574] env[62627]: DEBUG oslo_concurrency.lockutils [None req-95505125-9369-48fd-90ed-b2294140cdf5 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lock "8c170084-a7f8-4774-8f9d-7874103b0f4f" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 21.034s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1433.999261] env[62627]: DEBUG oslo_vmware.api [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195545, 'name': ReconfigVM_Task, 'duration_secs': 0.286057} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1433.999523] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Reconfigured VM instance instance-00000078 to attach disk [datastore2] cdff04c1-ec74-4afa-9f8c-7e1acbe9f336/cdff04c1-ec74-4afa-9f8c-7e1acbe9f336.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1434.000156] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-23aebc5b-4523-45da-8052-2ad203dc0828 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.005850] env[62627]: DEBUG oslo_vmware.api [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 1434.005850] env[62627]: value = "task-2195546" [ 1434.005850] env[62627]: _type = "Task" [ 1434.005850] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1434.014732] env[62627]: DEBUG oslo_vmware.api [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195546, 'name': Rename_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1434.211109] env[62627]: DEBUG nova.scheduler.client.report [None req-a077a8e6-afd7-4458-a874-54f1a8396027 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1434.515373] env[62627]: DEBUG oslo_vmware.api [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195546, 'name': Rename_Task, 'duration_secs': 0.233186} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1434.515672] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1434.515914] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4b52dc50-cb9d-4bd0-9d77-031bc17a5ecf {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.522449] env[62627]: DEBUG oslo_vmware.api [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 1434.522449] env[62627]: value = "task-2195547" [ 1434.522449] env[62627]: _type = "Task" [ 1434.522449] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1434.529466] env[62627]: DEBUG oslo_vmware.api [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195547, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1434.716413] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a077a8e6-afd7-4458-a874-54f1a8396027 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.633s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1434.735745] env[62627]: INFO nova.scheduler.client.report [None req-a077a8e6-afd7-4458-a874-54f1a8396027 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Deleted allocations for instance 00bb8c04-3c0b-48ff-8127-13d3e15cb86c [ 1435.031889] env[62627]: DEBUG oslo_vmware.api [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195547, 'name': PowerOnVM_Task, 'duration_secs': 0.435815} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1435.032181] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1435.032427] env[62627]: INFO nova.compute.manager [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Took 6.81 seconds to spawn the instance on the hypervisor. 
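[editor's note] The spawn traced above (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) follows one pattern: each vCenter "*_Task" method is invoked through oslo.vmware, which returns a Task reference immediately, and the driver then blocks in wait_for_task, producing the recurring "Waiting for the task ... to complete" and "_poll_task ... progress is N%" lines. A minimal sketch of that pattern follows; the endpoint, credentials, retry/poll values and the vm_ref argument are placeholders for illustration, not values taken from this log.

from oslo_vmware import api as vmware_api

def power_on(session, vm_ref):
    # invoke_api() issues the SOAP request (the "Invoking VirtualMachine.PowerOnVM_Task
    # with opID=oslo.vmware-..." lines) and returns a Task managed-object reference
    # without waiting for it to finish.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # wait_for_task() polls the task at task_poll_interval; each poll is one of the
    # "_poll_task ... progress is N%" DEBUG lines. It returns the completed task's info
    # once the task state is 'success', or raises if vCenter reports an error.
    return session.wait_for_task(task)

# Placeholder endpoint and credentials; in Nova these come from the [vmware] section of nova.conf.
session = vmware_api.VMwareAPISession('vcenter.example.test', 'admin', 'secret',
                                      api_retry_count=10, task_poll_interval=0.5)

[/editor's note]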
[ 1435.032649] env[62627]: DEBUG nova.compute.manager [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1435.033425] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ab7d44f-5d2e-46b4-9798-e444ce61bdc5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.243193] env[62627]: DEBUG oslo_concurrency.lockutils [None req-a077a8e6-afd7-4458-a874-54f1a8396027 tempest-ServersTestJSON-494939627 tempest-ServersTestJSON-494939627-project-member] Lock "00bb8c04-3c0b-48ff-8127-13d3e15cb86c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.491s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1435.338373] env[62627]: DEBUG oslo_concurrency.lockutils [None req-610fc2ee-c2bf-4234-b267-e4c855f934c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquiring lock "8c170084-a7f8-4774-8f9d-7874103b0f4f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1435.338635] env[62627]: DEBUG oslo_concurrency.lockutils [None req-610fc2ee-c2bf-4234-b267-e4c855f934c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lock "8c170084-a7f8-4774-8f9d-7874103b0f4f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1435.338843] env[62627]: DEBUG oslo_concurrency.lockutils [None req-610fc2ee-c2bf-4234-b267-e4c855f934c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquiring lock "8c170084-a7f8-4774-8f9d-7874103b0f4f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1435.339034] env[62627]: DEBUG oslo_concurrency.lockutils [None req-610fc2ee-c2bf-4234-b267-e4c855f934c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lock "8c170084-a7f8-4774-8f9d-7874103b0f4f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1435.339233] env[62627]: DEBUG oslo_concurrency.lockutils [None req-610fc2ee-c2bf-4234-b267-e4c855f934c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lock "8c170084-a7f8-4774-8f9d-7874103b0f4f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1435.341243] env[62627]: INFO nova.compute.manager [None req-610fc2ee-c2bf-4234-b267-e4c855f934c9 tempest-ServerActionsTestOtherB-764568701 
tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Terminating instance [ 1435.548955] env[62627]: INFO nova.compute.manager [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Took 11.57 seconds to build instance. [ 1435.844837] env[62627]: DEBUG nova.compute.manager [None req-610fc2ee-c2bf-4234-b267-e4c855f934c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Start destroying the instance on the hypervisor. {{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1435.845088] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-610fc2ee-c2bf-4234-b267-e4c855f934c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1435.846065] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce2cfbe7-440e-4b74-90e4-2f52cbfb86c8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.855181] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-610fc2ee-c2bf-4234-b267-e4c855f934c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1435.855549] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bba4cfd7-f447-4292-858e-4d833de603f2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.861679] env[62627]: DEBUG oslo_vmware.api [None req-610fc2ee-c2bf-4234-b267-e4c855f934c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1435.861679] env[62627]: value = "task-2195548" [ 1435.861679] env[62627]: _type = "Task" [ 1435.861679] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1435.870374] env[62627]: DEBUG oslo_vmware.api [None req-610fc2ee-c2bf-4234-b267-e4c855f934c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195548, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1436.051395] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4c7e8e7a-3f8e-4921-8e93-5a68a5f3134a tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lock "cdff04c1-ec74-4afa-9f8c-7e1acbe9f336" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.079s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1436.372016] env[62627]: DEBUG oslo_vmware.api [None req-610fc2ee-c2bf-4234-b267-e4c855f934c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195548, 'name': PowerOffVM_Task, 'duration_secs': 0.233549} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1436.372324] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-610fc2ee-c2bf-4234-b267-e4c855f934c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1436.372495] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-610fc2ee-c2bf-4234-b267-e4c855f934c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1436.372739] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dd0b65e6-bf13-42b9-9976-0cb597f7146b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.411294] env[62627]: DEBUG nova.compute.manager [req-4f140ae1-7019-4bad-bb3f-eda646807cf4 req-293172cc-a7ed-41bc-a013-4d2cd4fec1f0 service nova] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Received event network-changed-238d30ee-1280-409d-89ff-3c112db3c2d4 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1436.411528] env[62627]: DEBUG nova.compute.manager [req-4f140ae1-7019-4bad-bb3f-eda646807cf4 req-293172cc-a7ed-41bc-a013-4d2cd4fec1f0 service nova] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Refreshing instance network info cache due to event network-changed-238d30ee-1280-409d-89ff-3c112db3c2d4. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1436.411749] env[62627]: DEBUG oslo_concurrency.lockutils [req-4f140ae1-7019-4bad-bb3f-eda646807cf4 req-293172cc-a7ed-41bc-a013-4d2cd4fec1f0 service nova] Acquiring lock "refresh_cache-cdff04c1-ec74-4afa-9f8c-7e1acbe9f336" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1436.411896] env[62627]: DEBUG oslo_concurrency.lockutils [req-4f140ae1-7019-4bad-bb3f-eda646807cf4 req-293172cc-a7ed-41bc-a013-4d2cd4fec1f0 service nova] Acquired lock "refresh_cache-cdff04c1-ec74-4afa-9f8c-7e1acbe9f336" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1436.412071] env[62627]: DEBUG nova.network.neutron [req-4f140ae1-7019-4bad-bb3f-eda646807cf4 req-293172cc-a7ed-41bc-a013-4d2cd4fec1f0 service nova] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Refreshing network info cache for port 238d30ee-1280-409d-89ff-3c112db3c2d4 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1436.433383] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-610fc2ee-c2bf-4234-b267-e4c855f934c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1436.433594] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-610fc2ee-c2bf-4234-b267-e4c855f934c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1436.433797] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-610fc2ee-c2bf-4234-b267-e4c855f934c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Deleting the datastore file [datastore2] 8c170084-a7f8-4774-8f9d-7874103b0f4f {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1436.434065] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4201291f-76aa-468c-ad24-13f25f0e8baf {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.441437] env[62627]: DEBUG oslo_vmware.api [None req-610fc2ee-c2bf-4234-b267-e4c855f934c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for the task: (returnval){ [ 1436.441437] env[62627]: value = "task-2195550" [ 1436.441437] env[62627]: _type = "Task" [ 1436.441437] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1436.449193] env[62627]: DEBUG oslo_vmware.api [None req-610fc2ee-c2bf-4234-b267-e4c855f934c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195550, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1436.953120] env[62627]: DEBUG oslo_vmware.api [None req-610fc2ee-c2bf-4234-b267-e4c855f934c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195550, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1437.193951] env[62627]: DEBUG nova.network.neutron [req-4f140ae1-7019-4bad-bb3f-eda646807cf4 req-293172cc-a7ed-41bc-a013-4d2cd4fec1f0 service nova] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Updated VIF entry in instance network info cache for port 238d30ee-1280-409d-89ff-3c112db3c2d4. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1437.193951] env[62627]: DEBUG nova.network.neutron [req-4f140ae1-7019-4bad-bb3f-eda646807cf4 req-293172cc-a7ed-41bc-a013-4d2cd4fec1f0 service nova] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Updating instance_info_cache with network_info: [{"id": "238d30ee-1280-409d-89ff-3c112db3c2d4", "address": "fa:16:3e:51:1b:2f", "network": {"id": "da26ed97-a6a7-47f1-b6c0-d1b2f45ad202", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-265963183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.167", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97c11d791dcf4f94a8e8bda7a6f014cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6eaa481-1f92-4851-b98e-09ed0daad7cb", "external-id": "nsx-vlan-transportzone-636", "segmentation_id": 636, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap238d30ee-12", "ovs_interfaceid": "238d30ee-1280-409d-89ff-3c112db3c2d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1437.452459] env[62627]: DEBUG oslo_vmware.api [None req-610fc2ee-c2bf-4234-b267-e4c855f934c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Task: {'id': task-2195550, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.682549} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1437.452759] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-610fc2ee-c2bf-4234-b267-e4c855f934c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1437.452955] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-610fc2ee-c2bf-4234-b267-e4c855f934c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1437.453158] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-610fc2ee-c2bf-4234-b267-e4c855f934c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1437.453343] env[62627]: INFO nova.compute.manager [None req-610fc2ee-c2bf-4234-b267-e4c855f934c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Took 1.61 seconds to destroy the instance on the hypervisor. [ 1437.453588] env[62627]: DEBUG oslo.service.loopingcall [None req-610fc2ee-c2bf-4234-b267-e4c855f934c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1437.453782] env[62627]: DEBUG nova.compute.manager [-] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1437.453883] env[62627]: DEBUG nova.network.neutron [-] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1437.696851] env[62627]: DEBUG oslo_concurrency.lockutils [req-4f140ae1-7019-4bad-bb3f-eda646807cf4 req-293172cc-a7ed-41bc-a013-4d2cd4fec1f0 service nova] Releasing lock "refresh_cache-cdff04c1-ec74-4afa-9f8c-7e1acbe9f336" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1437.882167] env[62627]: DEBUG nova.compute.manager [req-9a630151-f309-4fc9-93b0-855d64aa614e req-2ef1b9e4-d910-4208-baa2-3eddebd3196c service nova] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Received event network-vif-deleted-71c1451f-de1d-4700-8326-282bc39bb6f7 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1437.882167] env[62627]: INFO nova.compute.manager [req-9a630151-f309-4fc9-93b0-855d64aa614e req-2ef1b9e4-d910-4208-baa2-3eddebd3196c service nova] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Neutron deleted interface 71c1451f-de1d-4700-8326-282bc39bb6f7; detaching it from the instance and deleting it from the info cache [ 1437.882167] env[62627]: DEBUG nova.network.neutron [req-9a630151-f309-4fc9-93b0-855d64aa614e req-2ef1b9e4-d910-4208-baa2-3eddebd3196c service nova] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1438.360350] env[62627]: DEBUG nova.network.neutron [-] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1438.385148] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d27f2f99-2922-4afe-8e1a-4df5078d93f2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.395145] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03464556-4a53-46a8-8a16-c851b0797be9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.420186] env[62627]: DEBUG nova.compute.manager [req-9a630151-f309-4fc9-93b0-855d64aa614e req-2ef1b9e4-d910-4208-baa2-3eddebd3196c service nova] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Detach interface failed, port_id=71c1451f-de1d-4700-8326-282bc39bb6f7, reason: Instance 8c170084-a7f8-4774-8f9d-7874103b0f4f could not be found. {{(pid=62627) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1438.863915] env[62627]: INFO nova.compute.manager [-] [instance: 8c170084-a7f8-4774-8f9d-7874103b0f4f] Took 1.41 seconds to deallocate network for instance. 
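[editor's note] The teardown above and the resource-tracker bookkeeping that follows it are serialized with oslo.concurrency locks: every "Acquiring lock ... by ...", "Lock ... acquired ... :: waited N.NNNs" and "Lock ... \"released\" ... :: held N.NNNs" triple in this log is emitted by lockutils around the protected call. A minimal sketch, using an illustrative lock name and function rather than Nova's actual code:

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def update_usage():
    # The body runs with the named in-process semaphore held. On entry lockutils logs
    # 'Acquiring lock "compute_resources" ...' and 'acquired ... :: waited X.XXXs';
    # on return it logs '"released" ... :: held Y.YYYs', matching the lines above.
    pass

update_usage()

[/editor's note]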
[ 1439.371056] env[62627]: DEBUG oslo_concurrency.lockutils [None req-610fc2ee-c2bf-4234-b267-e4c855f934c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1439.371402] env[62627]: DEBUG oslo_concurrency.lockutils [None req-610fc2ee-c2bf-4234-b267-e4c855f934c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1439.371447] env[62627]: DEBUG nova.objects.instance [None req-610fc2ee-c2bf-4234-b267-e4c855f934c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lazy-loading 'resources' on Instance uuid 8c170084-a7f8-4774-8f9d-7874103b0f4f {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1439.922960] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-002562e5-a32e-4415-b7c4-8818705b96fd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.930627] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf3eca70-26a4-4995-93f0-91e519dac2ce {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.961045] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db986a3b-ff5a-4d0b-8ffa-ce56de703dd9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.967781] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbaffde7-f953-4537-bf62-a7d16aae7b33 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.980237] env[62627]: DEBUG nova.compute.provider_tree [None req-610fc2ee-c2bf-4234-b267-e4c855f934c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1440.483252] env[62627]: DEBUG nova.scheduler.client.report [None req-610fc2ee-c2bf-4234-b267-e4c855f934c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1440.988233] env[62627]: DEBUG oslo_concurrency.lockutils [None req-610fc2ee-c2bf-4234-b267-e4c855f934c9 
tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.617s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1441.008677] env[62627]: INFO nova.scheduler.client.report [None req-610fc2ee-c2bf-4234-b267-e4c855f934c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Deleted allocations for instance 8c170084-a7f8-4774-8f9d-7874103b0f4f [ 1441.517486] env[62627]: DEBUG oslo_concurrency.lockutils [None req-610fc2ee-c2bf-4234-b267-e4c855f934c9 tempest-ServerActionsTestOtherB-764568701 tempest-ServerActionsTestOtherB-764568701-project-member] Lock "8c170084-a7f8-4774-8f9d-7874103b0f4f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.179s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1443.261075] env[62627]: DEBUG oslo_concurrency.lockutils [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Acquiring lock "77b68106-1abe-426b-89ce-3495776b9233" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1443.261075] env[62627]: DEBUG oslo_concurrency.lockutils [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Lock "77b68106-1abe-426b-89ce-3495776b9233" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1443.261075] env[62627]: INFO nova.compute.manager [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Shelving [ 1444.272145] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1444.272554] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5a60fcd1-dfe3-42a2-a0e8-3727991e4bee {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.281818] env[62627]: DEBUG oslo_vmware.api [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Waiting for the task: (returnval){ [ 1444.281818] env[62627]: value = "task-2195552" [ 1444.281818] env[62627]: _type = "Task" [ 1444.281818] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1444.290269] env[62627]: DEBUG oslo_vmware.api [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195552, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1444.791273] env[62627]: DEBUG oslo_vmware.api [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195552, 'name': PowerOffVM_Task, 'duration_secs': 0.219334} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1444.791564] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1444.792371] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65c752ac-44d2-41a4-8e96-cd510b4bfbab {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.810499] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a456ade0-0173-4153-b437-2d8938357bd0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.329383] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Creating Snapshot of the VM instance {{(pid=62627) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1445.329739] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-504ba178-a7d1-4393-a1ad-c652962f6986 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.337023] env[62627]: DEBUG oslo_vmware.api [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Waiting for the task: (returnval){ [ 1445.337023] env[62627]: value = "task-2195553" [ 1445.337023] env[62627]: _type = "Task" [ 1445.337023] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1445.345213] env[62627]: DEBUG oslo_vmware.api [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195553, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.846762] env[62627]: DEBUG oslo_vmware.api [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195553, 'name': CreateSnapshot_Task, 'duration_secs': 0.468862} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1445.847044] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Created Snapshot of the VM instance {{(pid=62627) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1445.847764] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-958d8326-ad4b-412e-80ee-47230657675d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.364729] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Creating linked-clone VM from snapshot {{(pid=62627) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1446.365030] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-6658c4a8-efe0-4412-998b-e0c1ed24bab5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.375096] env[62627]: DEBUG oslo_vmware.api [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Waiting for the task: (returnval){ [ 1446.375096] env[62627]: value = "task-2195554" [ 1446.375096] env[62627]: _type = "Task" [ 1446.375096] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1446.382808] env[62627]: DEBUG oslo_vmware.api [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195554, 'name': CloneVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.886526] env[62627]: DEBUG oslo_vmware.api [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195554, 'name': CloneVM_Task} progress is 94%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1447.387136] env[62627]: DEBUG oslo_vmware.api [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195554, 'name': CloneVM_Task} progress is 100%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1447.887922] env[62627]: DEBUG oslo_vmware.api [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195554, 'name': CloneVM_Task, 'duration_secs': 1.073567} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1447.888239] env[62627]: INFO nova.virt.vmwareapi.vmops [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Created linked-clone VM from snapshot [ 1447.889025] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa83bc0c-8c5a-466e-b639-5c849c9bae5e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.896691] env[62627]: DEBUG nova.virt.vmwareapi.images [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Uploading image 8c1fe05c-cd55-465a-a01d-4fcf29f67dd5 {{(pid=62627) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1447.928276] env[62627]: DEBUG oslo_vmware.rw_handles [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1447.928276] env[62627]: value = "vm-447857" [ 1447.928276] env[62627]: _type = "VirtualMachine" [ 1447.928276] env[62627]: }. {{(pid=62627) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1447.928812] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-cb848ba1-44be-4f0b-8398-4969c6cb8915 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.939275] env[62627]: DEBUG oslo_vmware.rw_handles [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Lease: (returnval){ [ 1447.939275] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d72b05-cd07-6c15-6262-9967503c622b" [ 1447.939275] env[62627]: _type = "HttpNfcLease" [ 1447.939275] env[62627]: } obtained for exporting VM: (result){ [ 1447.939275] env[62627]: value = "vm-447857" [ 1447.939275] env[62627]: _type = "VirtualMachine" [ 1447.939275] env[62627]: }. {{(pid=62627) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1447.939275] env[62627]: DEBUG oslo_vmware.api [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Waiting for the lease: (returnval){ [ 1447.939275] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d72b05-cd07-6c15-6262-9967503c622b" [ 1447.939275] env[62627]: _type = "HttpNfcLease" [ 1447.939275] env[62627]: } to be ready. 
{{(pid=62627) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1447.951328] env[62627]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1447.951328] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d72b05-cd07-6c15-6262-9967503c622b" [ 1447.951328] env[62627]: _type = "HttpNfcLease" [ 1447.951328] env[62627]: } is ready. {{(pid=62627) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1447.951619] env[62627]: DEBUG oslo_vmware.rw_handles [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1447.951619] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52d72b05-cd07-6c15-6262-9967503c622b" [ 1447.951619] env[62627]: _type = "HttpNfcLease" [ 1447.951619] env[62627]: }. {{(pid=62627) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1447.953384] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c6d58c5-263a-4a84-99b6-4df16b02401b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.961601] env[62627]: DEBUG oslo_vmware.rw_handles [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5231bb51-5c24-6676-a19f-34e4fbe5ff41/disk-0.vmdk from lease info. {{(pid=62627) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1447.961601] env[62627]: DEBUG oslo_vmware.rw_handles [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5231bb51-5c24-6676-a19f-34e4fbe5ff41/disk-0.vmdk for reading. 
{{(pid=62627) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1448.054474] env[62627]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-d83811ba-8fbe-488e-a68f-5d3ddb8b49ab {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.508691] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Acquiring lock "54b2cc24-6652-4cc4-93cc-b059f8a9be86" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1449.509046] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Lock "54b2cc24-6652-4cc4-93cc-b059f8a9be86" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1450.012245] env[62627]: DEBUG nova.compute.manager [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1450.534703] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1450.534984] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1450.537095] env[62627]: INFO nova.compute.claims [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1451.601519] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b9c6b0f-1aaa-4b8e-8920-37d88439e8d1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.609760] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bc0c384-17e0-4fa2-afef-1ea93c005b0a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.641564] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-165e61d4-3568-4bdf-89c5-1495b277ccd8 
{{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.649260] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-778ed083-efa3-4d2b-a13f-7e9f7e7c4306 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.663055] env[62627]: DEBUG nova.compute.provider_tree [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1452.166251] env[62627]: DEBUG nova.scheduler.client.report [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1452.671375] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.136s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1452.672027] env[62627]: DEBUG nova.compute.manager [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1453.177433] env[62627]: DEBUG nova.compute.utils [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1453.178973] env[62627]: DEBUG nova.compute.manager [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Allocating IP information in the background. 
{{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1453.179189] env[62627]: DEBUG nova.network.neutron [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1453.242383] env[62627]: DEBUG nova.policy [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5a247050ec104a58901c4f14acc41542', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4b88e3a0d8ee4e3682bd312872dcbc0f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 1453.539422] env[62627]: DEBUG nova.network.neutron [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Successfully created port: b7ce94b7-c449-47bc-b068-3f8eca9f22f0 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1453.683015] env[62627]: DEBUG nova.compute.manager [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1454.694857] env[62627]: DEBUG nova.compute.manager [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Start spawning the instance on the hypervisor. 
{{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1454.723404] env[62627]: DEBUG nova.virt.hardware [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1454.723698] env[62627]: DEBUG nova.virt.hardware [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1454.723958] env[62627]: DEBUG nova.virt.hardware [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1454.724581] env[62627]: DEBUG nova.virt.hardware [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1454.724581] env[62627]: DEBUG nova.virt.hardware [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1454.724807] env[62627]: DEBUG nova.virt.hardware [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1454.725092] env[62627]: DEBUG nova.virt.hardware [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1454.725297] env[62627]: DEBUG nova.virt.hardware [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1454.725475] env[62627]: DEBUG nova.virt.hardware [None 
req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1454.725640] env[62627]: DEBUG nova.virt.hardware [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1454.725811] env[62627]: DEBUG nova.virt.hardware [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1454.726715] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f9f9325-b65a-4082-a0ca-129bf850c4fa {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.734683] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72f937a3-71f8-42f2-b65f-bac64baee403 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.783050] env[62627]: DEBUG oslo_vmware.rw_handles [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5231bb51-5c24-6676-a19f-34e4fbe5ff41/disk-0.vmdk. {{(pid=62627) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1454.783938] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d814fc38-4343-448c-a972-1dda3e8d275f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.789985] env[62627]: DEBUG oslo_vmware.rw_handles [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5231bb51-5c24-6676-a19f-34e4fbe5ff41/disk-0.vmdk is in state: ready. {{(pid=62627) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1454.790155] env[62627]: ERROR oslo_vmware.rw_handles [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5231bb51-5c24-6676-a19f-34e4fbe5ff41/disk-0.vmdk due to incomplete transfer. 
[ 1454.790390] env[62627]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-aeedb474-4487-4de1-bac3-c04762879966 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.796923] env[62627]: DEBUG oslo_vmware.rw_handles [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5231bb51-5c24-6676-a19f-34e4fbe5ff41/disk-0.vmdk. {{(pid=62627) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1454.797131] env[62627]: DEBUG nova.virt.vmwareapi.images [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Uploaded image 8c1fe05c-cd55-465a-a01d-4fcf29f67dd5 to the Glance image server {{(pid=62627) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1454.799553] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Destroying the VM {{(pid=62627) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1454.799793] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-18468c06-768c-4f82-8e22-027092d4fa69 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.805878] env[62627]: DEBUG oslo_vmware.api [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Waiting for the task: (returnval){ [ 1454.805878] env[62627]: value = "task-2195556" [ 1454.805878] env[62627]: _type = "Task" [ 1454.805878] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1454.813586] env[62627]: DEBUG oslo_vmware.api [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195556, 'name': Destroy_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.905188] env[62627]: DEBUG nova.compute.manager [req-c38198ed-b90b-40c8-96bc-c9cc013f3de0 req-092dda68-2750-4a04-83fa-c6561e395ca0 service nova] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Received event network-vif-plugged-b7ce94b7-c449-47bc-b068-3f8eca9f22f0 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1454.905429] env[62627]: DEBUG oslo_concurrency.lockutils [req-c38198ed-b90b-40c8-96bc-c9cc013f3de0 req-092dda68-2750-4a04-83fa-c6561e395ca0 service nova] Acquiring lock "54b2cc24-6652-4cc4-93cc-b059f8a9be86-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1454.905606] env[62627]: DEBUG oslo_concurrency.lockutils [req-c38198ed-b90b-40c8-96bc-c9cc013f3de0 req-092dda68-2750-4a04-83fa-c6561e395ca0 service nova] Lock "54b2cc24-6652-4cc4-93cc-b059f8a9be86-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1454.905802] env[62627]: DEBUG oslo_concurrency.lockutils [req-c38198ed-b90b-40c8-96bc-c9cc013f3de0 req-092dda68-2750-4a04-83fa-c6561e395ca0 service nova] Lock "54b2cc24-6652-4cc4-93cc-b059f8a9be86-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1454.905970] env[62627]: DEBUG nova.compute.manager [req-c38198ed-b90b-40c8-96bc-c9cc013f3de0 req-092dda68-2750-4a04-83fa-c6561e395ca0 service nova] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] No waiting events found dispatching network-vif-plugged-b7ce94b7-c449-47bc-b068-3f8eca9f22f0 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1454.906146] env[62627]: WARNING nova.compute.manager [req-c38198ed-b90b-40c8-96bc-c9cc013f3de0 req-092dda68-2750-4a04-83fa-c6561e395ca0 service nova] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Received unexpected event network-vif-plugged-b7ce94b7-c449-47bc-b068-3f8eca9f22f0 for instance with vm_state building and task_state spawning. [ 1455.315860] env[62627]: DEBUG oslo_vmware.api [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195556, 'name': Destroy_Task} progress is 100%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.464394] env[62627]: DEBUG nova.network.neutron [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Successfully updated port: b7ce94b7-c449-47bc-b068-3f8eca9f22f0 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1455.481827] env[62627]: DEBUG nova.compute.manager [req-b0de03c3-8a12-449a-9cc7-1cbd47e8878d req-27cfbdbb-ef09-423c-9839-f0befb72556b service nova] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Received event network-changed-b7ce94b7-c449-47bc-b068-3f8eca9f22f0 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1455.482089] env[62627]: DEBUG nova.compute.manager [req-b0de03c3-8a12-449a-9cc7-1cbd47e8878d req-27cfbdbb-ef09-423c-9839-f0befb72556b service nova] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Refreshing instance network info cache due to event network-changed-b7ce94b7-c449-47bc-b068-3f8eca9f22f0. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1455.482259] env[62627]: DEBUG oslo_concurrency.lockutils [req-b0de03c3-8a12-449a-9cc7-1cbd47e8878d req-27cfbdbb-ef09-423c-9839-f0befb72556b service nova] Acquiring lock "refresh_cache-54b2cc24-6652-4cc4-93cc-b059f8a9be86" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1455.482405] env[62627]: DEBUG oslo_concurrency.lockutils [req-b0de03c3-8a12-449a-9cc7-1cbd47e8878d req-27cfbdbb-ef09-423c-9839-f0befb72556b service nova] Acquired lock "refresh_cache-54b2cc24-6652-4cc4-93cc-b059f8a9be86" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1455.482571] env[62627]: DEBUG nova.network.neutron [req-b0de03c3-8a12-449a-9cc7-1cbd47e8878d req-27cfbdbb-ef09-423c-9839-f0befb72556b service nova] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Refreshing network info cache for port b7ce94b7-c449-47bc-b068-3f8eca9f22f0 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1455.816466] env[62627]: DEBUG oslo_vmware.api [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195556, 'name': Destroy_Task, 'duration_secs': 0.526993} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1455.816902] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Destroyed the VM [ 1455.817236] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Deleting Snapshot of the VM instance {{(pid=62627) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1455.817497] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-df82af2f-6ddc-4f87-9a60-3963673f400f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.823885] env[62627]: DEBUG oslo_vmware.api [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Waiting for the task: (returnval){ [ 1455.823885] env[62627]: value = "task-2195557" [ 1455.823885] env[62627]: _type = "Task" [ 1455.823885] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1455.830844] env[62627]: DEBUG oslo_vmware.api [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195557, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.967853] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Acquiring lock "refresh_cache-54b2cc24-6652-4cc4-93cc-b059f8a9be86" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1456.015065] env[62627]: DEBUG nova.network.neutron [req-b0de03c3-8a12-449a-9cc7-1cbd47e8878d req-27cfbdbb-ef09-423c-9839-f0befb72556b service nova] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1456.088307] env[62627]: DEBUG nova.network.neutron [req-b0de03c3-8a12-449a-9cc7-1cbd47e8878d req-27cfbdbb-ef09-423c-9839-f0befb72556b service nova] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1456.333155] env[62627]: DEBUG oslo_vmware.api [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195557, 'name': RemoveSnapshot_Task, 'duration_secs': 0.316048} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1456.333422] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Deleted Snapshot of the VM instance {{(pid=62627) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1456.333695] env[62627]: DEBUG nova.compute.manager [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1456.334485] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daeabb4c-f17a-4bf4-bcb3-ea872f07d131 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.590942] env[62627]: DEBUG oslo_concurrency.lockutils [req-b0de03c3-8a12-449a-9cc7-1cbd47e8878d req-27cfbdbb-ef09-423c-9839-f0befb72556b service nova] Releasing lock "refresh_cache-54b2cc24-6652-4cc4-93cc-b059f8a9be86" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1456.591433] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Acquired lock "refresh_cache-54b2cc24-6652-4cc4-93cc-b059f8a9be86" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1456.591623] env[62627]: DEBUG nova.network.neutron [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1456.736265] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1456.736471] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1456.736617] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Starting heal instance info cache {{(pid=62627) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 1456.845743] env[62627]: INFO nova.compute.manager [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Shelve offloading [ 1457.121611] env[62627]: DEBUG nova.network.neutron [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Instance cache 
missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1457.242455] env[62627]: DEBUG nova.network.neutron [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Updating instance_info_cache with network_info: [{"id": "b7ce94b7-c449-47bc-b068-3f8eca9f22f0", "address": "fa:16:3e:80:8b:50", "network": {"id": "da92dc57-06a7-46e5-a51b-1f0a8d7373a7", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-952102388-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4b88e3a0d8ee4e3682bd312872dcbc0f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4a8f5363-be3a-4f92-9ccf-33bb0c8113b3", "external-id": "nsx-vlan-transportzone-944", "segmentation_id": 944, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb7ce94b7-c4", "ovs_interfaceid": "b7ce94b7-c449-47bc-b068-3f8eca9f22f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1457.349564] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1457.349949] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2692046e-c244-4f54-8adf-092d3be4e0cd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.359377] env[62627]: DEBUG oslo_vmware.api [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Waiting for the task: (returnval){ [ 1457.359377] env[62627]: value = "task-2195558" [ 1457.359377] env[62627]: _type = "Task" [ 1457.359377] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1457.371833] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] VM already powered off {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1457.372197] env[62627]: DEBUG nova.compute.manager [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1457.373506] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9deef88d-38c9-4cba-a985-83b0d6d13f71 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.381702] env[62627]: DEBUG oslo_concurrency.lockutils [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Acquiring lock "refresh_cache-77b68106-1abe-426b-89ce-3495776b9233" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1457.381976] env[62627]: DEBUG oslo_concurrency.lockutils [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Acquired lock "refresh_cache-77b68106-1abe-426b-89ce-3495776b9233" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1457.382322] env[62627]: DEBUG nova.network.neutron [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1457.744604] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Releasing lock "refresh_cache-54b2cc24-6652-4cc4-93cc-b059f8a9be86" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1457.744892] env[62627]: DEBUG nova.compute.manager [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Instance network_info: |[{"id": "b7ce94b7-c449-47bc-b068-3f8eca9f22f0", "address": "fa:16:3e:80:8b:50", "network": {"id": "da92dc57-06a7-46e5-a51b-1f0a8d7373a7", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-952102388-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4b88e3a0d8ee4e3682bd312872dcbc0f", "mtu": 8950, 
"physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4a8f5363-be3a-4f92-9ccf-33bb0c8113b3", "external-id": "nsx-vlan-transportzone-944", "segmentation_id": 944, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb7ce94b7-c4", "ovs_interfaceid": "b7ce94b7-c449-47bc-b068-3f8eca9f22f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1457.745316] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:80:8b:50', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4a8f5363-be3a-4f92-9ccf-33bb0c8113b3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b7ce94b7-c449-47bc-b068-3f8eca9f22f0', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1457.752764] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Creating folder: Project (4b88e3a0d8ee4e3682bd312872dcbc0f). Parent ref: group-v447541. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1457.753032] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-940f8bac-2698-4556-b195-297a40c5c4bc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.764981] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Created folder: Project (4b88e3a0d8ee4e3682bd312872dcbc0f) in parent group-v447541. [ 1457.765177] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Creating folder: Instances. Parent ref: group-v447858. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1457.765397] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bb15d162-6f9a-4462-bc02-0f3a5685c843 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.774262] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Created folder: Instances in parent group-v447858. [ 1457.774490] env[62627]: DEBUG oslo.service.loopingcall [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1457.774663] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1457.774844] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a0193808-8f70-4783-a45b-a051bcae2c7c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.792056] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1457.792056] env[62627]: value = "task-2195561" [ 1457.792056] env[62627]: _type = "Task" [ 1457.792056] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1457.799201] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195561, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1458.104073] env[62627]: DEBUG nova.network.neutron [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Updating instance_info_cache with network_info: [{"id": "54f0bc09-a4b0-47de-a805-27b17f146e00", "address": "fa:16:3e:50:f4:19", "network": {"id": "eb8841de-7dc6-4afa-ad35-670f3baa1354", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-428698998-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.161", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6095198e22f645698a7c566414eba47c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cd38fdec-d092-4a84-ab41-685f6dbb4f29", "external-id": "nsx-vlan-transportzone-622", "segmentation_id": 622, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54f0bc09-a4", "ovs_interfaceid": "54f0bc09-a4b0-47de-a805-27b17f146e00", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1458.245112] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Didn't find any instances for network info cache update. 
{{(pid=62627) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10531}} [ 1458.245329] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1458.245482] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1458.245632] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1458.245780] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1458.245921] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1458.246075] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1458.246208] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62627) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 1458.246351] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1458.301703] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195561, 'name': CreateVM_Task, 'duration_secs': 0.285109} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1458.301902] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1458.302531] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1458.302698] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1458.303045] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1458.303298] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e70b6196-044c-4f93-a567-dcc5a08622b7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.307988] env[62627]: DEBUG oslo_vmware.api [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Waiting for the task: (returnval){ [ 1458.307988] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52f34974-806f-72dc-6ed5-450abdce930a" [ 1458.307988] env[62627]: _type = "Task" [ 1458.307988] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1458.315151] env[62627]: DEBUG oslo_vmware.api [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52f34974-806f-72dc-6ed5-450abdce930a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1458.606978] env[62627]: DEBUG oslo_concurrency.lockutils [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Releasing lock "refresh_cache-77b68106-1abe-426b-89ce-3495776b9233" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1458.749768] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1458.750021] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1458.750193] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1458.750387] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62627) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1458.751307] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b74ec9f-567b-4d8d-b3a0-90cadbf877f1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.760856] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b384512e-b18f-4d88-b07a-9939dd4f1fe3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.775401] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99dec1da-f8aa-4ed5-b1eb-08ae6d9bb053 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.782453] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cc3bf23-a5af-4ccc-b1a9-b0f81ad3c85c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.824461] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180433MB free_disk=177GB free_vcpus=48 pci_devices=None {{(pid=62627) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1458.824461] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" 
{{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1458.824461] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1458.830360] env[62627]: DEBUG nova.compute.manager [req-4f95ad34-990d-471e-91e9-4352a3fffbf5 req-baec7d38-be9c-4191-9955-e9e432b917ca service nova] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Received event network-vif-unplugged-54f0bc09-a4b0-47de-a805-27b17f146e00 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1458.831792] env[62627]: DEBUG oslo_concurrency.lockutils [req-4f95ad34-990d-471e-91e9-4352a3fffbf5 req-baec7d38-be9c-4191-9955-e9e432b917ca service nova] Acquiring lock "77b68106-1abe-426b-89ce-3495776b9233-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1458.831792] env[62627]: DEBUG oslo_concurrency.lockutils [req-4f95ad34-990d-471e-91e9-4352a3fffbf5 req-baec7d38-be9c-4191-9955-e9e432b917ca service nova] Lock "77b68106-1abe-426b-89ce-3495776b9233-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1458.831792] env[62627]: DEBUG oslo_concurrency.lockutils [req-4f95ad34-990d-471e-91e9-4352a3fffbf5 req-baec7d38-be9c-4191-9955-e9e432b917ca service nova] Lock "77b68106-1abe-426b-89ce-3495776b9233-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1458.831792] env[62627]: DEBUG nova.compute.manager [req-4f95ad34-990d-471e-91e9-4352a3fffbf5 req-baec7d38-be9c-4191-9955-e9e432b917ca service nova] [instance: 77b68106-1abe-426b-89ce-3495776b9233] No waiting events found dispatching network-vif-unplugged-54f0bc09-a4b0-47de-a805-27b17f146e00 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1458.831792] env[62627]: WARNING nova.compute.manager [req-4f95ad34-990d-471e-91e9-4352a3fffbf5 req-baec7d38-be9c-4191-9955-e9e432b917ca service nova] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Received unexpected event network-vif-unplugged-54f0bc09-a4b0-47de-a805-27b17f146e00 for instance with vm_state shelved and task_state shelving_offloading. [ 1458.838527] env[62627]: DEBUG oslo_vmware.api [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52f34974-806f-72dc-6ed5-450abdce930a, 'name': SearchDatastore_Task, 'duration_secs': 0.009866} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1458.838804] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1458.839038] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1458.839265] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1458.839409] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1458.839587] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1458.839835] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bf427783-7b54-4b88-9884-90176c301d7c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.847524] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1458.847697] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1458.849052] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-24ec7848-ebe7-43ff-a6ea-00280f1e4c73 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.854826] env[62627]: DEBUG oslo_vmware.api [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Waiting for the task: (returnval){ [ 1458.854826] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52801b55-1152-9425-1760-3845abec0ba4" [ 1458.854826] env[62627]: _type = "Task" [ 1458.854826] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1458.862404] env[62627]: DEBUG oslo_vmware.api [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52801b55-1152-9425-1760-3845abec0ba4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1458.909446] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1458.910560] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e7098ae-d8bc-44e7-9424-43e5ebd8c692 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.918416] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1458.918708] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b3e6d0f0-ca4d-4a92-aea3-adbdfb56bbfc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.974915] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1458.975148] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1458.975328] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 
tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Deleting the datastore file [datastore2] 77b68106-1abe-426b-89ce-3495776b9233 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1458.975592] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-33b2b10a-78ee-4dae-bd52-845d53c1362b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.982035] env[62627]: DEBUG oslo_vmware.api [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Waiting for the task: (returnval){ [ 1458.982035] env[62627]: value = "task-2195563" [ 1458.982035] env[62627]: _type = "Task" [ 1458.982035] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1458.991556] env[62627]: DEBUG oslo_vmware.api [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195563, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1459.365655] env[62627]: DEBUG oslo_vmware.api [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52801b55-1152-9425-1760-3845abec0ba4, 'name': SearchDatastore_Task, 'duration_secs': 0.008111} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1459.366395] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e9d2b2ac-8d92-4fa4-8bbc-5fcff0d4ce38 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.375390] env[62627]: DEBUG oslo_vmware.api [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Waiting for the task: (returnval){ [ 1459.375390] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]520e8cca-8852-76aa-9aa7-7b0346fd72c4" [ 1459.375390] env[62627]: _type = "Task" [ 1459.375390] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1459.383915] env[62627]: DEBUG oslo_vmware.api [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]520e8cca-8852-76aa-9aa7-7b0346fd72c4, 'name': SearchDatastore_Task} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1459.384196] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1459.384469] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 54b2cc24-6652-4cc4-93cc-b059f8a9be86/54b2cc24-6652-4cc4-93cc-b059f8a9be86.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1459.384713] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-000ccd8a-1b4a-4920-9f6a-fa26268cdb84 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.391405] env[62627]: DEBUG oslo_vmware.api [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Waiting for the task: (returnval){ [ 1459.391405] env[62627]: value = "task-2195564" [ 1459.391405] env[62627]: _type = "Task" [ 1459.391405] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1459.404377] env[62627]: DEBUG oslo_vmware.api [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': task-2195564, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1459.491521] env[62627]: DEBUG oslo_vmware.api [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195563, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.141331} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1459.491824] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1459.492056] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1459.492288] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1459.515472] env[62627]: INFO nova.scheduler.client.report [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Deleted allocations for instance 77b68106-1abe-426b-89ce-3495776b9233 [ 1459.852262] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance cdff04c1-ec74-4afa-9f8c-7e1acbe9f336 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1459.852427] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 54b2cc24-6652-4cc4-93cc-b059f8a9be86 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1459.852632] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Total usable vcpus: 48, total allocated vcpus: 3 {{(pid=62627) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1459.852790] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1088MB phys_disk=200GB used_disk=3GB total_vcpus=48 used_vcpus=3 pci_stats=[] {{(pid=62627) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1459.890436] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cd04303-f659-4dee-836e-8900bc85e7b7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.903093] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef7caf62-0efc-4cc2-9fc3-6297297a48a5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.905855] env[62627]: DEBUG oslo_vmware.api [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': task-2195564, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.459383} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1459.906134] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 54b2cc24-6652-4cc4-93cc-b059f8a9be86/54b2cc24-6652-4cc4-93cc-b059f8a9be86.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1459.906344] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1459.906839] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-78f7a056-27fe-439a-bb36-67b74867880f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.935770] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e0d6b8f-5bc6-42cb-bc26-4608c89e9fa7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.938429] env[62627]: DEBUG oslo_vmware.api [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Waiting for the task: (returnval){ [ 1459.938429] env[62627]: value = "task-2195565" [ 1459.938429] env[62627]: _type = "Task" [ 1459.938429] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1459.946393] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b15840d5-6d72-42c1-8e96-82fea3eaf9ef {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.952465] env[62627]: DEBUG oslo_vmware.api [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': task-2195565, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1459.961963] env[62627]: DEBUG nova.compute.provider_tree [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1460.020137] env[62627]: DEBUG oslo_concurrency.lockutils [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1460.448158] env[62627]: DEBUG oslo_vmware.api [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': task-2195565, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.090825} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1460.448531] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1460.449127] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fdde5a5-4325-4a16-b155-43ae3074943c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.470397] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Reconfiguring VM instance instance-00000079 to attach disk [datastore2] 54b2cc24-6652-4cc4-93cc-b059f8a9be86/54b2cc24-6652-4cc4-93cc-b059f8a9be86.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1460.471152] env[62627]: DEBUG nova.scheduler.client.report [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 
'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1460.473973] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5546428d-830e-45b1-a88b-2d2ceec75dac {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.488509] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62627) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1460.488674] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.666s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1460.488914] env[62627]: DEBUG oslo_concurrency.lockutils [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.469s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1460.489141] env[62627]: DEBUG nova.objects.instance [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Lazy-loading 'resources' on Instance uuid 77b68106-1abe-426b-89ce-3495776b9233 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1460.494426] env[62627]: DEBUG oslo_vmware.api [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Waiting for the task: (returnval){ [ 1460.494426] env[62627]: value = "task-2195566" [ 1460.494426] env[62627]: _type = "Task" [ 1460.494426] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1460.502523] env[62627]: DEBUG oslo_vmware.api [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': task-2195566, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.843351] env[62627]: DEBUG nova.compute.manager [req-9e74cc9a-a900-4ee5-bfe4-ac18f5949354 req-61d6fcdf-0127-4145-8ac6-a7ab22e675b8 service nova] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Received event network-changed-54f0bc09-a4b0-47de-a805-27b17f146e00 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1460.843522] env[62627]: DEBUG nova.compute.manager [req-9e74cc9a-a900-4ee5-bfe4-ac18f5949354 req-61d6fcdf-0127-4145-8ac6-a7ab22e675b8 service nova] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Refreshing instance network info cache due to event network-changed-54f0bc09-a4b0-47de-a805-27b17f146e00. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1460.843712] env[62627]: DEBUG oslo_concurrency.lockutils [req-9e74cc9a-a900-4ee5-bfe4-ac18f5949354 req-61d6fcdf-0127-4145-8ac6-a7ab22e675b8 service nova] Acquiring lock "refresh_cache-77b68106-1abe-426b-89ce-3495776b9233" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1460.844190] env[62627]: DEBUG oslo_concurrency.lockutils [req-9e74cc9a-a900-4ee5-bfe4-ac18f5949354 req-61d6fcdf-0127-4145-8ac6-a7ab22e675b8 service nova] Acquired lock "refresh_cache-77b68106-1abe-426b-89ce-3495776b9233" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1460.844369] env[62627]: DEBUG nova.network.neutron [req-9e74cc9a-a900-4ee5-bfe4-ac18f5949354 req-61d6fcdf-0127-4145-8ac6-a7ab22e675b8 service nova] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Refreshing network info cache for port 54f0bc09-a4b0-47de-a805-27b17f146e00 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1460.991958] env[62627]: DEBUG nova.objects.instance [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Lazy-loading 'numa_topology' on Instance uuid 77b68106-1abe-426b-89ce-3495776b9233 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1461.006375] env[62627]: DEBUG oslo_vmware.api [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': task-2195566, 'name': ReconfigVM_Task, 'duration_secs': 0.257939} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1461.006631] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Reconfigured VM instance instance-00000079 to attach disk [datastore2] 54b2cc24-6652-4cc4-93cc-b059f8a9be86/54b2cc24-6652-4cc4-93cc-b059f8a9be86.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1461.007242] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-41e155e2-7bfb-4477-9031-b7717a70727b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.013552] env[62627]: DEBUG oslo_vmware.api [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Waiting for the task: (returnval){ [ 1461.013552] env[62627]: value = "task-2195567" [ 1461.013552] env[62627]: _type = "Task" [ 1461.013552] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1461.022423] env[62627]: DEBUG oslo_vmware.api [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': task-2195567, 'name': Rename_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1461.163699] env[62627]: DEBUG oslo_concurrency.lockutils [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Acquiring lock "77b68106-1abe-426b-89ce-3495776b9233" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1461.494508] env[62627]: DEBUG nova.objects.base [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Object Instance<77b68106-1abe-426b-89ce-3495776b9233> lazy-loaded attributes: resources,numa_topology {{(pid=62627) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1461.525972] env[62627]: DEBUG oslo_vmware.api [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': task-2195567, 'name': Rename_Task, 'duration_secs': 0.16725} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1461.526390] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1461.526467] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-91e578ec-2a5c-44f8-9362-38903009b53e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.533702] env[62627]: DEBUG oslo_vmware.api [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Waiting for the task: (returnval){ [ 1461.533702] env[62627]: value = "task-2195568" [ 1461.533702] env[62627]: _type = "Task" [ 1461.533702] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1461.541199] env[62627]: DEBUG oslo_vmware.api [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': task-2195568, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1461.544579] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a34f410-517b-4d4b-b73b-77d9ca2fc1cc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.550922] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cdc51ce-2f17-46be-bc4d-ecdcd8735edc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.580529] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-426c244d-f66c-49cb-862c-ccccd54da0e1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.587635] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc8cd408-2fe6-430d-8c2f-ab42d4f2151a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.600343] env[62627]: DEBUG nova.compute.provider_tree [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1461.640481] env[62627]: DEBUG nova.network.neutron [req-9e74cc9a-a900-4ee5-bfe4-ac18f5949354 req-61d6fcdf-0127-4145-8ac6-a7ab22e675b8 service nova] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Updated VIF entry in instance network info cache for port 54f0bc09-a4b0-47de-a805-27b17f146e00. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1461.640840] env[62627]: DEBUG nova.network.neutron [req-9e74cc9a-a900-4ee5-bfe4-ac18f5949354 req-61d6fcdf-0127-4145-8ac6-a7ab22e675b8 service nova] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Updating instance_info_cache with network_info: [{"id": "54f0bc09-a4b0-47de-a805-27b17f146e00", "address": "fa:16:3e:50:f4:19", "network": {"id": "eb8841de-7dc6-4afa-ad35-670f3baa1354", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-428698998-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.161", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6095198e22f645698a7c566414eba47c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap54f0bc09-a4", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1462.042943] env[62627]: DEBUG oslo_vmware.api [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': task-2195568, 'name': PowerOnVM_Task, 'duration_secs': 0.40824} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1462.044324] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1462.044324] env[62627]: INFO nova.compute.manager [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Took 7.35 seconds to spawn the instance on the hypervisor. 
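The entries above for instance 54b2cc24-6652-4cc4-93cc-b059f8a9be86 trace the usual VMware spawn sequence: the cached image VMDK is copied into the instance directory (CopyVirtualDisk_Task), the root disk is extended to the flavor size (ExtendVirtualDisk_Task), the disk is attached by reconfiguring the VM (ReconfigVM_Task), and the VM is renamed and powered on. The sketch below only replays that call order against a stubbed session object for illustration; FakeSession, spawn_from_cached_image and their arguments are invented stand-ins, not Nova's vm_util/volumeops code or oslo.vmware's API.

    # Illustrative stub: replays the vCenter task sequence visible in the log above.
    # FakeSession and its methods are hypothetical stand-ins, not oslo.vmware's API.
    class FakeSession:
        def invoke_api(self, method, **kwargs):
            print(f"Invoking {method} with {kwargs}")
            return f"task-{method}"

        def wait_for_task(self, task):
            print(f"Waiting for {task} ... completed successfully")


    def spawn_from_cached_image(session, datastore, image_id, instance_uuid, root_kb):
        cache_vmdk = f"[{datastore}] devstack-image-cache_base/{image_id}/{image_id}.vmdk"
        root_vmdk = f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"

        # 1. Copy the cached image disk to the instance directory (CopyVirtualDisk_Task).
        session.wait_for_task(session.invoke_api("CopyVirtualDisk_Task",
                                                 source=cache_vmdk, dest=root_vmdk))
        # 2. Grow the root disk to the flavor size (ExtendVirtualDisk_Task).
        session.wait_for_task(session.invoke_api("ExtendVirtualDisk_Task",
                                                 name=root_vmdk, new_capacity_kb=root_kb))
        # 3. Attach the copied disk to the VM by reconfiguring it (ReconfigVM_Task).
        session.wait_for_task(session.invoke_api("ReconfigVM_Task", disk=root_vmdk))
        # 4. Rename the VM to its instance UUID, then power it on.
        session.wait_for_task(session.invoke_api("Rename_Task", new_name=instance_uuid))
        session.wait_for_task(session.invoke_api("PowerOnVM_Task"))


    spawn_from_cached_image(FakeSession(), "datastore2",
                            "d5497cf1-16e8-478a-a834-e9289b1751b7",
                            "54b2cc24-6652-4cc4-93cc-b059f8a9be86",
                            1048576)  # 1048576 KB, matching "Extending root virtual disk to 1048576"
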
[ 1462.044324] env[62627]: DEBUG nova.compute.manager [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1462.044549] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-174304b0-80f7-46b4-8adf-fb00f1938a9d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.104151] env[62627]: DEBUG nova.scheduler.client.report [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1462.143142] env[62627]: DEBUG oslo_concurrency.lockutils [req-9e74cc9a-a900-4ee5-bfe4-ac18f5949354 req-61d6fcdf-0127-4145-8ac6-a7ab22e675b8 service nova] Releasing lock "refresh_cache-77b68106-1abe-426b-89ce-3495776b9233" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1462.559467] env[62627]: INFO nova.compute.manager [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Took 12.04 seconds to build instance. 
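The recurring "Acquiring lock X by Y", "Lock X acquired by Y :: waited N s" and "Lock X 'released' by Y :: held N s" lines throughout this trace are emitted by oslo.concurrency's lockutils wrapper around the code that takes the lock. A minimal sketch of that pattern follows, assuming the oslo.concurrency package is available; the lock names mirror the log, but the decorated function body and the sleep are invented for illustration.

    # Minimal sketch of the lock pattern behind the "Acquiring/acquired/released" lines,
    # assuming oslo.concurrency is installed; the function body below is illustrative only.
    import time

    from oslo_concurrency import lockutils


    @lockutils.synchronized('compute_resources')
    def update_usage():
        # Everything in here runs with the "compute_resources" lock held; the log's
        # "waited"/"held" figures are the time spent blocking on and then inside it.
        time.sleep(0.1)


    update_usage()

    # Context-manager form, as used for the refresh_cache-* locks in this log:
    with lockutils.lock('refresh_cache-54b2cc24-6652-4cc4-93cc-b059f8a9be86'):
        pass  # the network info cache would be rebuilt while the lock is held
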
[ 1462.610902] env[62627]: DEBUG oslo_concurrency.lockutils [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.122s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1463.061379] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1aa85775-0ce1-45cb-a6f9-e2bc4be5f4a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Lock "54b2cc24-6652-4cc4-93cc-b059f8a9be86" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.552s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1463.117550] env[62627]: DEBUG oslo_concurrency.lockutils [None req-64a2a8c0-2cd9-46d9-916a-889724d64f1a tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Lock "77b68106-1abe-426b-89ce-3495776b9233" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 19.856s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1463.118383] env[62627]: DEBUG oslo_concurrency.lockutils [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Lock "77b68106-1abe-426b-89ce-3495776b9233" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 1.955s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1463.118586] env[62627]: INFO nova.compute.manager [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Unshelving [ 1463.652499] env[62627]: DEBUG nova.compute.manager [req-c3c07d83-2fea-4c8a-ad07-24dadf1d45e1 req-4e65ad7b-584a-467e-9810-8272ff041e05 service nova] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Received event network-changed-b7ce94b7-c449-47bc-b068-3f8eca9f22f0 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1463.652737] env[62627]: DEBUG nova.compute.manager [req-c3c07d83-2fea-4c8a-ad07-24dadf1d45e1 req-4e65ad7b-584a-467e-9810-8272ff041e05 service nova] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Refreshing instance network info cache due to event network-changed-b7ce94b7-c449-47bc-b068-3f8eca9f22f0. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1463.652987] env[62627]: DEBUG oslo_concurrency.lockutils [req-c3c07d83-2fea-4c8a-ad07-24dadf1d45e1 req-4e65ad7b-584a-467e-9810-8272ff041e05 service nova] Acquiring lock "refresh_cache-54b2cc24-6652-4cc4-93cc-b059f8a9be86" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1463.653177] env[62627]: DEBUG oslo_concurrency.lockutils [req-c3c07d83-2fea-4c8a-ad07-24dadf1d45e1 req-4e65ad7b-584a-467e-9810-8272ff041e05 service nova] Acquired lock "refresh_cache-54b2cc24-6652-4cc4-93cc-b059f8a9be86" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1463.653372] env[62627]: DEBUG nova.network.neutron [req-c3c07d83-2fea-4c8a-ad07-24dadf1d45e1 req-4e65ad7b-584a-467e-9810-8272ff041e05 service nova] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Refreshing network info cache for port b7ce94b7-c449-47bc-b068-3f8eca9f22f0 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1464.129836] env[62627]: DEBUG nova.compute.utils [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1464.375920] env[62627]: DEBUG nova.network.neutron [req-c3c07d83-2fea-4c8a-ad07-24dadf1d45e1 req-4e65ad7b-584a-467e-9810-8272ff041e05 service nova] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Updated VIF entry in instance network info cache for port b7ce94b7-c449-47bc-b068-3f8eca9f22f0. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1464.376332] env[62627]: DEBUG nova.network.neutron [req-c3c07d83-2fea-4c8a-ad07-24dadf1d45e1 req-4e65ad7b-584a-467e-9810-8272ff041e05 service nova] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Updating instance_info_cache with network_info: [{"id": "b7ce94b7-c449-47bc-b068-3f8eca9f22f0", "address": "fa:16:3e:80:8b:50", "network": {"id": "da92dc57-06a7-46e5-a51b-1f0a8d7373a7", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-952102388-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.220", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4b88e3a0d8ee4e3682bd312872dcbc0f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4a8f5363-be3a-4f92-9ccf-33bb0c8113b3", "external-id": "nsx-vlan-transportzone-944", "segmentation_id": 944, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb7ce94b7-c4", "ovs_interfaceid": "b7ce94b7-c449-47bc-b068-3f8eca9f22f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1464.633272] env[62627]: INFO nova.virt.block_device [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 
tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Booting with volume b3d23526-ef45-4e03-a5a6-5df1c7626e46 at /dev/sdb [ 1464.668574] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dc7410cb-0729-4809-b4cc-7daa9d78b282 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.677865] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-107baad8-7054-4b17-95a1-52ba2f8739ff {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.701681] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c7073ea2-d5b9-42f6-875e-e522a7a41c7a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.709173] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9b04fa6-bb8b-48ac-82de-0d6823cb1346 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.733542] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ac8d2a8-7c6e-4012-bef8-29fea4f48694 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.739593] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-667c9d3c-f674-410c-bb13-d6ad48ebf544 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.752330] env[62627]: DEBUG nova.virt.block_device [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Updating existing volume attachment record: 136f0435-0f73-45f5-9255-7a30ce8a0771 {{(pid=62627) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1464.879768] env[62627]: DEBUG oslo_concurrency.lockutils [req-c3c07d83-2fea-4c8a-ad07-24dadf1d45e1 req-4e65ad7b-584a-467e-9810-8272ff041e05 service nova] Releasing lock "refresh_cache-54b2cc24-6652-4cc4-93cc-b059f8a9be86" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1470.844932] env[62627]: DEBUG oslo_concurrency.lockutils [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1470.845217] env[62627]: DEBUG oslo_concurrency.lockutils [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1470.845418] env[62627]: DEBUG nova.objects.instance [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 
tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Lazy-loading 'pci_requests' on Instance uuid 77b68106-1abe-426b-89ce-3495776b9233 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1471.332472] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4de86eeb-fd51-4df5-b2df-1d480f184696 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquiring lock "cdff04c1-ec74-4afa-9f8c-7e1acbe9f336" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1471.332718] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4de86eeb-fd51-4df5-b2df-1d480f184696 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lock "cdff04c1-ec74-4afa-9f8c-7e1acbe9f336" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1471.332902] env[62627]: DEBUG nova.compute.manager [None req-4de86eeb-fd51-4df5-b2df-1d480f184696 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1471.333881] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2cd6c5a-c825-48f3-a59a-20a511a0323d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.340483] env[62627]: DEBUG nova.compute.manager [None req-4de86eeb-fd51-4df5-b2df-1d480f184696 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62627) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1471.341093] env[62627]: DEBUG nova.objects.instance [None req-4de86eeb-fd51-4df5-b2df-1d480f184696 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lazy-loading 'flavor' on Instance uuid cdff04c1-ec74-4afa-9f8c-7e1acbe9f336 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1471.348919] env[62627]: DEBUG nova.objects.instance [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Lazy-loading 'numa_topology' on Instance uuid 77b68106-1abe-426b-89ce-3495776b9233 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1471.851346] env[62627]: INFO nova.compute.claims [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1472.349108] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-4de86eeb-fd51-4df5-b2df-1d480f184696 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Powering off the VM {{(pid=62627) power_off_instance 
/opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1472.349429] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a6214542-9eeb-49d7-aaf9-65722a0c8b71 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.357401] env[62627]: DEBUG oslo_vmware.api [None req-4de86eeb-fd51-4df5-b2df-1d480f184696 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 1472.357401] env[62627]: value = "task-2195573" [ 1472.357401] env[62627]: _type = "Task" [ 1472.357401] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1472.364856] env[62627]: DEBUG oslo_vmware.api [None req-4de86eeb-fd51-4df5-b2df-1d480f184696 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195573, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1472.868921] env[62627]: DEBUG oslo_vmware.api [None req-4de86eeb-fd51-4df5-b2df-1d480f184696 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195573, 'name': PowerOffVM_Task, 'duration_secs': 0.157642} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1472.869243] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-4de86eeb-fd51-4df5-b2df-1d480f184696 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1472.869342] env[62627]: DEBUG nova.compute.manager [None req-4de86eeb-fd51-4df5-b2df-1d480f184696 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1472.870081] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f5879f2-5d4e-47c2-856b-e1dcb33d6d08 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.904378] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16008a8a-1eaa-4f8a-a125-bc65c22938de {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.913712] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d0875db-a76f-4788-bef4-5490db950872 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.943840] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4472667-d213-428e-b945-9a732edb7563 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.950905] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87eb9bd1-f073-490f-8c9a-244c5fd023c2 {{(pid=62627) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.963774] env[62627]: DEBUG nova.compute.provider_tree [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1473.383331] env[62627]: DEBUG oslo_concurrency.lockutils [None req-4de86eeb-fd51-4df5-b2df-1d480f184696 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lock "cdff04c1-ec74-4afa-9f8c-7e1acbe9f336" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.050s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1473.466513] env[62627]: DEBUG nova.scheduler.client.report [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1473.682554] env[62627]: DEBUG nova.objects.instance [None req-9b3240f2-c203-4b58-8ff0-aef7b7a43421 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lazy-loading 'flavor' on Instance uuid cdff04c1-ec74-4afa-9f8c-7e1acbe9f336 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1473.971737] env[62627]: DEBUG oslo_concurrency.lockutils [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.126s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1474.000407] env[62627]: INFO nova.network.neutron [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Updating port 54f0bc09-a4b0-47de-a805-27b17f146e00 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1474.190176] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9b3240f2-c203-4b58-8ff0-aef7b7a43421 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquiring lock "refresh_cache-cdff04c1-ec74-4afa-9f8c-7e1acbe9f336" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1474.190357] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9b3240f2-c203-4b58-8ff0-aef7b7a43421 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquired lock "refresh_cache-cdff04c1-ec74-4afa-9f8c-7e1acbe9f336" {{(pid=62627) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1474.190565] env[62627]: DEBUG nova.network.neutron [None req-9b3240f2-c203-4b58-8ff0-aef7b7a43421 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1474.190754] env[62627]: DEBUG nova.objects.instance [None req-9b3240f2-c203-4b58-8ff0-aef7b7a43421 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lazy-loading 'info_cache' on Instance uuid cdff04c1-ec74-4afa-9f8c-7e1acbe9f336 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1474.694237] env[62627]: DEBUG nova.objects.base [None req-9b3240f2-c203-4b58-8ff0-aef7b7a43421 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=62627) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1475.418803] env[62627]: DEBUG nova.network.neutron [None req-9b3240f2-c203-4b58-8ff0-aef7b7a43421 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Updating instance_info_cache with network_info: [{"id": "238d30ee-1280-409d-89ff-3c112db3c2d4", "address": "fa:16:3e:51:1b:2f", "network": {"id": "da26ed97-a6a7-47f1-b6c0-d1b2f45ad202", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-265963183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.167", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97c11d791dcf4f94a8e8bda7a6f014cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6eaa481-1f92-4851-b98e-09ed0daad7cb", "external-id": "nsx-vlan-transportzone-636", "segmentation_id": 636, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap238d30ee-12", "ovs_interfaceid": "238d30ee-1280-409d-89ff-3c112db3c2d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1475.643793] env[62627]: DEBUG nova.compute.manager [req-c3e074b2-fdf0-4206-bcb7-7b16631b42b1 req-448c9810-2a69-42d3-9bbe-99fad963706e service nova] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Received event network-vif-plugged-54f0bc09-a4b0-47de-a805-27b17f146e00 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1475.644017] env[62627]: DEBUG oslo_concurrency.lockutils [req-c3e074b2-fdf0-4206-bcb7-7b16631b42b1 req-448c9810-2a69-42d3-9bbe-99fad963706e service nova] Acquiring lock "77b68106-1abe-426b-89ce-3495776b9233-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1475.644404] 
env[62627]: DEBUG oslo_concurrency.lockutils [req-c3e074b2-fdf0-4206-bcb7-7b16631b42b1 req-448c9810-2a69-42d3-9bbe-99fad963706e service nova] Lock "77b68106-1abe-426b-89ce-3495776b9233-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1475.644586] env[62627]: DEBUG oslo_concurrency.lockutils [req-c3e074b2-fdf0-4206-bcb7-7b16631b42b1 req-448c9810-2a69-42d3-9bbe-99fad963706e service nova] Lock "77b68106-1abe-426b-89ce-3495776b9233-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1475.644757] env[62627]: DEBUG nova.compute.manager [req-c3e074b2-fdf0-4206-bcb7-7b16631b42b1 req-448c9810-2a69-42d3-9bbe-99fad963706e service nova] [instance: 77b68106-1abe-426b-89ce-3495776b9233] No waiting events found dispatching network-vif-plugged-54f0bc09-a4b0-47de-a805-27b17f146e00 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1475.644923] env[62627]: WARNING nova.compute.manager [req-c3e074b2-fdf0-4206-bcb7-7b16631b42b1 req-448c9810-2a69-42d3-9bbe-99fad963706e service nova] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Received unexpected event network-vif-plugged-54f0bc09-a4b0-47de-a805-27b17f146e00 for instance with vm_state shelved_offloaded and task_state spawning. [ 1475.727432] env[62627]: DEBUG oslo_concurrency.lockutils [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Acquiring lock "refresh_cache-77b68106-1abe-426b-89ce-3495776b9233" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1475.727432] env[62627]: DEBUG oslo_concurrency.lockutils [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Acquired lock "refresh_cache-77b68106-1abe-426b-89ce-3495776b9233" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1475.727432] env[62627]: DEBUG nova.network.neutron [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1475.923067] env[62627]: DEBUG oslo_concurrency.lockutils [None req-9b3240f2-c203-4b58-8ff0-aef7b7a43421 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Releasing lock "refresh_cache-cdff04c1-ec74-4afa-9f8c-7e1acbe9f336" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1476.422247] env[62627]: DEBUG nova.network.neutron [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Updating instance_info_cache with network_info: [{"id": "54f0bc09-a4b0-47de-a805-27b17f146e00", "address": "fa:16:3e:50:f4:19", "network": {"id": "eb8841de-7dc6-4afa-ad35-670f3baa1354", "bridge": "br-int", "label": 
"tempest-AttachVolumeShelveTestJSON-428698998-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.161", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6095198e22f645698a7c566414eba47c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cd38fdec-d092-4a84-ab41-685f6dbb4f29", "external-id": "nsx-vlan-transportzone-622", "segmentation_id": 622, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54f0bc09-a4", "ovs_interfaceid": "54f0bc09-a4b0-47de-a805-27b17f146e00", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1476.924817] env[62627]: DEBUG oslo_concurrency.lockutils [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Releasing lock "refresh_cache-77b68106-1abe-426b-89ce-3495776b9233" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1476.928742] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b3240f2-c203-4b58-8ff0-aef7b7a43421 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1476.929027] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-31fb5720-ff55-4358-bda9-845da33a7d4b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.936857] env[62627]: DEBUG oslo_vmware.api [None req-9b3240f2-c203-4b58-8ff0-aef7b7a43421 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 1476.936857] env[62627]: value = "task-2195574" [ 1476.936857] env[62627]: _type = "Task" [ 1476.936857] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1476.944686] env[62627]: DEBUG oslo_vmware.api [None req-9b3240f2-c203-4b58-8ff0-aef7b7a43421 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195574, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1476.952203] env[62627]: DEBUG nova.virt.hardware [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='2a2988067107a5588f7b79ebb7079f2c',container_format='bare',created_at=2025-01-30T20:42:52Z,direct_url=,disk_format='vmdk',id=8c1fe05c-cd55-465a-a01d-4fcf29f67dd5,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-1925964092-shelved',owner='6095198e22f645698a7c566414eba47c',properties=ImageMetaProps,protected=,size=31668736,status='active',tags=,updated_at=2025-01-30T20:43:04Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1476.952433] env[62627]: DEBUG nova.virt.hardware [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1476.952592] env[62627]: DEBUG nova.virt.hardware [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1476.952771] env[62627]: DEBUG nova.virt.hardware [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1476.952916] env[62627]: DEBUG nova.virt.hardware [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1476.953074] env[62627]: DEBUG nova.virt.hardware [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1476.953284] env[62627]: DEBUG nova.virt.hardware [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1476.953443] env[62627]: DEBUG nova.virt.hardware [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1476.953606] env[62627]: DEBUG nova.virt.hardware [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1476.953765] env[62627]: DEBUG nova.virt.hardware [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1476.953934] env[62627]: DEBUG nova.virt.hardware [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1476.954705] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8d5cdf2-774e-4496-8ca0-e4c0ebb2406d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.962018] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03a211af-5529-4478-8651-9786c9819b6c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.974815] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:50:f4:19', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cd38fdec-d092-4a84-ab41-685f6dbb4f29', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '54f0bc09-a4b0-47de-a805-27b17f146e00', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1476.982058] env[62627]: DEBUG oslo.service.loopingcall [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1476.982298] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1476.982498] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-139e4d47-83e8-4e53-9703-1c37ca3d2b7f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.000343] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1477.000343] env[62627]: value = "task-2195575" [ 1477.000343] env[62627]: _type = "Task" [ 1477.000343] env[62627]: } to complete. 
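The CreateVM_Task requested above, like the PowerOnVM_Task before it, is waited on by repeatedly polling the task object until it reports completion (the recurring "progress is N%" records). A generic sketch of that polling loop, assuming a `get_task_state(task_id)` callable that returns a dict with `state` and `progress`; this is not the oslo.vmware `wait_for_task` implementation.

```python
# Minimal sketch of the poll-until-done pattern visible in the log.
import time

def wait_for_task(get_task_state, task_id, interval=0.5, timeout=300):
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state = get_task_state(task_id)      # e.g. {"state": "running", "progress": 46}
        if state["state"] == "success":
            return state                     # log: "... completed successfully"
        if state["state"] == "error":
            raise RuntimeError(f"task {task_id} failed: {state.get('error')}")
        time.sleep(interval)                 # log: "... progress is 46%"
    raise TimeoutError(f"task {task_id} did not finish in {timeout}s")
```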
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.007586] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195575, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.447905] env[62627]: DEBUG oslo_vmware.api [None req-9b3240f2-c203-4b58-8ff0-aef7b7a43421 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195574, 'name': PowerOnVM_Task, 'duration_secs': 0.382864} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1477.448328] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b3240f2-c203-4b58-8ff0-aef7b7a43421 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1477.448369] env[62627]: DEBUG nova.compute.manager [None req-9b3240f2-c203-4b58-8ff0-aef7b7a43421 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1477.449290] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-886f7486-300d-48fc-a054-339fe3cee59b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.510606] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195575, 'name': CreateVM_Task, 'duration_secs': 0.266776} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1477.510777] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1477.511400] env[62627]: DEBUG oslo_concurrency.lockutils [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c1fe05c-cd55-465a-a01d-4fcf29f67dd5" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1477.511566] env[62627]: DEBUG oslo_concurrency.lockutils [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c1fe05c-cd55-465a-a01d-4fcf29f67dd5" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1477.511951] env[62627]: DEBUG oslo_concurrency.lockutils [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8c1fe05c-cd55-465a-a01d-4fcf29f67dd5" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1477.512220] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-246db993-b4ec-443a-a5bd-40b34c998704 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.516782] env[62627]: DEBUG oslo_vmware.api [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Waiting for the task: (returnval){ [ 1477.516782] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52ed0cf4-8e60-878d-2c02-69c10b5aab7b" [ 1477.516782] env[62627]: _type = "Task" [ 1477.516782] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.524140] env[62627]: DEBUG oslo_vmware.api [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52ed0cf4-8e60-878d-2c02-69c10b5aab7b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.668791] env[62627]: DEBUG nova.compute.manager [req-47328401-9aec-41ec-ab51-cbbda05d2cc3 req-fcd6cf8a-8a3a-4740-8f71-d056ae64afa7 service nova] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Received event network-changed-54f0bc09-a4b0-47de-a805-27b17f146e00 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1477.668989] env[62627]: DEBUG nova.compute.manager [req-47328401-9aec-41ec-ab51-cbbda05d2cc3 req-fcd6cf8a-8a3a-4740-8f71-d056ae64afa7 service nova] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Refreshing instance network info cache due to event network-changed-54f0bc09-a4b0-47de-a805-27b17f146e00. 
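The network-changed event received here, like the earlier "unexpected event network-vif-plugged" warning, goes through a per-instance event table guarded by a "<uuid>-events" lock: a notification either completes a registered waiter or is reported as unexpected. A sketch of that pattern; it is an illustration, not nova.compute.manager.InstanceEvents.

```python
# Sketch: per-instance named events protected by a lock, so an external
# event either wakes a waiter or is flagged as unexpected.
import threading

class InstanceEvents:
    def __init__(self):
        self._lock = threading.Lock()
        self._events = {}                       # (instance_uuid, event_name) -> Event

    def prepare(self, instance_uuid, event_name):
        with self._lock:                        # cf. lock "<uuid>-events" in the log
            ev = threading.Event()
            self._events[(instance_uuid, event_name)] = ev
            return ev

    def pop(self, instance_uuid, event_name):
        with self._lock:
            return self._events.pop((instance_uuid, event_name), None)

def dispatch(events, instance_uuid, event_name):
    ev = events.pop(instance_uuid, event_name)
    if ev is None:
        # matches the WARNING: "Received unexpected event ..."
        print(f"unexpected event {event_name} for {instance_uuid}")
    else:
        ev.set()
```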
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1477.669215] env[62627]: DEBUG oslo_concurrency.lockutils [req-47328401-9aec-41ec-ab51-cbbda05d2cc3 req-fcd6cf8a-8a3a-4740-8f71-d056ae64afa7 service nova] Acquiring lock "refresh_cache-77b68106-1abe-426b-89ce-3495776b9233" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1477.669360] env[62627]: DEBUG oslo_concurrency.lockutils [req-47328401-9aec-41ec-ab51-cbbda05d2cc3 req-fcd6cf8a-8a3a-4740-8f71-d056ae64afa7 service nova] Acquired lock "refresh_cache-77b68106-1abe-426b-89ce-3495776b9233" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1477.669523] env[62627]: DEBUG nova.network.neutron [req-47328401-9aec-41ec-ab51-cbbda05d2cc3 req-fcd6cf8a-8a3a-4740-8f71-d056ae64afa7 service nova] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Refreshing network info cache for port 54f0bc09-a4b0-47de-a805-27b17f146e00 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1478.027185] env[62627]: DEBUG oslo_concurrency.lockutils [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c1fe05c-cd55-465a-a01d-4fcf29f67dd5" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1478.027505] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Processing image 8c1fe05c-cd55-465a-a01d-4fcf29f67dd5 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1478.027741] env[62627]: DEBUG oslo_concurrency.lockutils [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c1fe05c-cd55-465a-a01d-4fcf29f67dd5/8c1fe05c-cd55-465a-a01d-4fcf29f67dd5.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1478.027887] env[62627]: DEBUG oslo_concurrency.lockutils [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c1fe05c-cd55-465a-a01d-4fcf29f67dd5/8c1fe05c-cd55-465a-a01d-4fcf29f67dd5.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1478.028077] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1478.028329] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8d47e9d4-ac0f-4085-ae85-842b8d854f47 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.045700] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1478.045700] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1478.046544] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9371bbf2-19d7-4f16-a224-0eeb32bbde48 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.051450] env[62627]: DEBUG oslo_vmware.api [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Waiting for the task: (returnval){ [ 1478.051450] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]528b18a5-a4ef-18ee-0138-969ba0e1db7c" [ 1478.051450] env[62627]: _type = "Task" [ 1478.051450] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1478.058425] env[62627]: DEBUG oslo_vmware.api [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]528b18a5-a4ef-18ee-0138-969ba0e1db7c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.390044] env[62627]: DEBUG nova.network.neutron [req-47328401-9aec-41ec-ab51-cbbda05d2cc3 req-fcd6cf8a-8a3a-4740-8f71-d056ae64afa7 service nova] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Updated VIF entry in instance network info cache for port 54f0bc09-a4b0-47de-a805-27b17f146e00. 
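Each cache refresh in this section is bracketed by Acquiring/Acquired/Releasing records for a lock named "refresh_cache-<instance uuid>". A sketch of that pattern using the `lockutils.lock` context manager from oslo.concurrency (the same module these lock records come from); `refresh_cb` is a hypothetical stand-in for the Neutron lookup.

```python
# Sketch: serialize refreshes of a per-instance network info cache with a
# named lock, mirroring the Acquiring/Acquired/Releasing log lines.
from oslo_concurrency import lockutils

def refresh_instance_cache(cache, instance_uuid, refresh_cb):
    with lockutils.lock(f"refresh_cache-{instance_uuid}"):
        cache[instance_uuid] = refresh_cb(instance_uuid)
        return cache[instance_uuid]
```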
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1478.390422] env[62627]: DEBUG nova.network.neutron [req-47328401-9aec-41ec-ab51-cbbda05d2cc3 req-fcd6cf8a-8a3a-4740-8f71-d056ae64afa7 service nova] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Updating instance_info_cache with network_info: [{"id": "54f0bc09-a4b0-47de-a805-27b17f146e00", "address": "fa:16:3e:50:f4:19", "network": {"id": "eb8841de-7dc6-4afa-ad35-670f3baa1354", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-428698998-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.161", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6095198e22f645698a7c566414eba47c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cd38fdec-d092-4a84-ab41-685f6dbb4f29", "external-id": "nsx-vlan-transportzone-622", "segmentation_id": 622, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54f0bc09-a4", "ovs_interfaceid": "54f0bc09-a4b0-47de-a805-27b17f146e00", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1478.561999] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Preparing fetch location {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1478.562388] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Fetch image to [datastore2] OSTACK_IMG_9cd333f5-bbce-494b-b8de-d8b1a81c9abc/OSTACK_IMG_9cd333f5-bbce-494b-b8de-d8b1a81c9abc.vmdk {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1478.562484] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Downloading stream optimized image 8c1fe05c-cd55-465a-a01d-4fcf29f67dd5 to [datastore2] OSTACK_IMG_9cd333f5-bbce-494b-b8de-d8b1a81c9abc/OSTACK_IMG_9cd333f5-bbce-494b-b8de-d8b1a81c9abc.vmdk on the data store datastore2 as vApp {{(pid=62627) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1478.562685] env[62627]: DEBUG nova.virt.vmwareapi.images [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Downloading image file data 8c1fe05c-cd55-465a-a01d-4fcf29f67dd5 to the ESX as VM named 'OSTACK_IMG_9cd333f5-bbce-494b-b8de-d8b1a81c9abc' {{(pid=62627) 
fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1478.600625] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c73200a1-1cd5-4fde-bcd0-964195df95da {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.606677] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-0e2e4efa-722e-413b-bfb8-091674e38e68 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Suspending the VM {{(pid=62627) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1478.606912] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-9345f3f0-3c4d-4508-9cca-60505c69914a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.614276] env[62627]: DEBUG oslo_vmware.api [None req-0e2e4efa-722e-413b-bfb8-091674e38e68 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 1478.614276] env[62627]: value = "task-2195576" [ 1478.614276] env[62627]: _type = "Task" [ 1478.614276] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1478.623681] env[62627]: DEBUG oslo_vmware.api [None req-0e2e4efa-722e-413b-bfb8-091674e38e68 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195576, 'name': SuspendVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.635120] env[62627]: DEBUG oslo_vmware.rw_handles [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1478.635120] env[62627]: value = "resgroup-9" [ 1478.635120] env[62627]: _type = "ResourcePool" [ 1478.635120] env[62627]: }. {{(pid=62627) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1478.635408] env[62627]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-94e12fb9-10c6-43b1-9bba-108e35a90279 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.654831] env[62627]: DEBUG oslo_vmware.rw_handles [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Lease: (returnval){ [ 1478.654831] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52fc4252-2759-910b-0dc9-6dfc09e130ce" [ 1478.654831] env[62627]: _type = "HttpNfcLease" [ 1478.654831] env[62627]: } obtained for vApp import into resource pool (val){ [ 1478.654831] env[62627]: value = "resgroup-9" [ 1478.654831] env[62627]: _type = "ResourcePool" [ 1478.654831] env[62627]: }. 
{{(pid=62627) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1478.655271] env[62627]: DEBUG oslo_vmware.api [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Waiting for the lease: (returnval){ [ 1478.655271] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52fc4252-2759-910b-0dc9-6dfc09e130ce" [ 1478.655271] env[62627]: _type = "HttpNfcLease" [ 1478.655271] env[62627]: } to be ready. {{(pid=62627) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1478.662099] env[62627]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1478.662099] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52fc4252-2759-910b-0dc9-6dfc09e130ce" [ 1478.662099] env[62627]: _type = "HttpNfcLease" [ 1478.662099] env[62627]: } is initializing. {{(pid=62627) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1478.894264] env[62627]: DEBUG oslo_concurrency.lockutils [req-47328401-9aec-41ec-ab51-cbbda05d2cc3 req-fcd6cf8a-8a3a-4740-8f71-d056ae64afa7 service nova] Releasing lock "refresh_cache-77b68106-1abe-426b-89ce-3495776b9233" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1479.124630] env[62627]: DEBUG oslo_vmware.api [None req-0e2e4efa-722e-413b-bfb8-091674e38e68 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195576, 'name': SuspendVM_Task} progress is 70%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.162831] env[62627]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1479.162831] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52fc4252-2759-910b-0dc9-6dfc09e130ce" [ 1479.162831] env[62627]: _type = "HttpNfcLease" [ 1479.162831] env[62627]: } is initializing. {{(pid=62627) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1479.625352] env[62627]: DEBUG oslo_vmware.api [None req-0e2e4efa-722e-413b-bfb8-091674e38e68 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195576, 'name': SuspendVM_Task, 'duration_secs': 0.588002} completed successfully. 
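In parallel with the image import, the ServerActionsTestJSON instance is suspended (the SuspendVM_Task just completed) and later resumed with PowerOnVM_Task followed by a power-state check. A compact sketch of that sequence; `session` is a hypothetical wrapper exposing `invoke`, `wait`, and `get_property`, not the oslo.vmware or Nova vmwareapi API.

```python
# Sketch of the suspend -> resume -> check-state ordering seen in the log.
def suspend_and_resume(session, vm_ref):
    session.wait(session.invoke("SuspendVM_Task", vm_ref))   # "Suspending/Suspended the VM"
    session.wait(session.invoke("PowerOnVM_Task", vm_ref))   # "Resuming/Resumed the VM"
    # "Checking state": read the power state back from the hypervisor
    return session.get_property(vm_ref, "runtime.powerState")
```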
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1479.625747] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-0e2e4efa-722e-413b-bfb8-091674e38e68 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Suspended the VM {{(pid=62627) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1479.625814] env[62627]: DEBUG nova.compute.manager [None req-0e2e4efa-722e-413b-bfb8-091674e38e68 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1479.626565] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b289f3cc-1f28-4a5c-a148-e66724ca2251 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.664133] env[62627]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1479.664133] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52fc4252-2759-910b-0dc9-6dfc09e130ce" [ 1479.664133] env[62627]: _type = "HttpNfcLease" [ 1479.664133] env[62627]: } is ready. {{(pid=62627) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1479.664440] env[62627]: DEBUG oslo_vmware.rw_handles [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1479.664440] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52fc4252-2759-910b-0dc9-6dfc09e130ce" [ 1479.664440] env[62627]: _type = "HttpNfcLease" [ 1479.664440] env[62627]: }. {{(pid=62627) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1479.665158] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5449e5f4-f7b5-4143-930d-78fef0817e15 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.672644] env[62627]: DEBUG oslo_vmware.rw_handles [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522660e6-a0b9-c0cc-2eb2-435b9df8bb71/disk-0.vmdk from lease info. {{(pid=62627) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1479.672872] env[62627]: DEBUG oslo_vmware.rw_handles [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Creating HTTP connection to write to file with size = 31668736 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522660e6-a0b9-c0cc-2eb2-435b9df8bb71/disk-0.vmdk. 
{{(pid=62627) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1479.735523] env[62627]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-b4a9db35-3a1c-4fca-8037-252c26716853 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.759388] env[62627]: DEBUG oslo_vmware.rw_handles [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Completed reading data from the image iterator. {{(pid=62627) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1480.759786] env[62627]: DEBUG oslo_vmware.rw_handles [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522660e6-a0b9-c0cc-2eb2-435b9df8bb71/disk-0.vmdk. {{(pid=62627) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1480.760542] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccc06855-aa6d-447d-a855-1282300138d2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.766992] env[62627]: DEBUG oslo_vmware.rw_handles [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522660e6-a0b9-c0cc-2eb2-435b9df8bb71/disk-0.vmdk is in state: ready. {{(pid=62627) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1480.767177] env[62627]: DEBUG oslo_vmware.rw_handles [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522660e6-a0b9-c0cc-2eb2-435b9df8bb71/disk-0.vmdk. 
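The records above trace the stream-optimized image import end to end: an HttpNfcLease is requested for ImportVApp into the resource pool, polled while it is initializing, the disk-0.vmdk URL is read from the lease info, the image bytes are written over HTTP while HttpNfcLeaseProgress is reported, and the lease is finally completed. A sketch of that sequence; every helper name here is a hypothetical stand-in, not the oslo.vmware rw_handles implementation.

```python
# Sketch of the vApp import flow: lease -> wait ready -> VMDK URL ->
# streamed upload with progress -> lease complete.
import time

def import_stream_optimized(image_chunks, image_size, helpers):
    lease = helpers.create_import_lease()                  # ImportVApp -> HttpNfcLease
    while helpers.lease_state(lease) == "initializing":    # "Lease ... is initializing"
        time.sleep(1)

    url = helpers.vmdk_url(lease)                          # "Found VMDK URL: ..."
    written = 0
    for chunk in image_chunks:                             # HTTP write to the VMDK URL
        helpers.upload(url, chunk)
        written += len(chunk)
        helpers.report_progress(lease, written * 100 // image_size)  # HttpNfcLeaseProgress
    helpers.complete_lease(lease)                          # HttpNfcLeaseComplete
```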
{{(pid=62627) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1480.767400] env[62627]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-cbcbeca4-6ae3-4745-a66f-29b628483532 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.947896] env[62627]: INFO nova.compute.manager [None req-0e9920a8-a717-4613-a621-3f794ff41e20 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Resuming [ 1480.948680] env[62627]: DEBUG nova.objects.instance [None req-0e9920a8-a717-4613-a621-3f794ff41e20 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lazy-loading 'flavor' on Instance uuid cdff04c1-ec74-4afa-9f8c-7e1acbe9f336 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1480.970095] env[62627]: DEBUG oslo_vmware.rw_handles [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522660e6-a0b9-c0cc-2eb2-435b9df8bb71/disk-0.vmdk. {{(pid=62627) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1480.970356] env[62627]: INFO nova.virt.vmwareapi.images [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Downloaded image file data 8c1fe05c-cd55-465a-a01d-4fcf29f67dd5 [ 1480.971500] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a5cbb82-d6a7-4665-b098-bce1064abc0f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.987099] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9961679b-cde3-4913-8e7d-4bcba947f7c6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.012261] env[62627]: INFO nova.virt.vmwareapi.images [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] The imported VM was unregistered [ 1481.014743] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Caching image {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1481.015053] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Creating directory with path [datastore2] devstack-image-cache_base/8c1fe05c-cd55-465a-a01d-4fcf29f67dd5 {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1481.015313] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0145c57a-4578-4102-9dfa-784cc738d68c {{(pid=62627) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.041409] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Created directory with path [datastore2] devstack-image-cache_base/8c1fe05c-cd55-465a-a01d-4fcf29f67dd5 {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1481.041621] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_9cd333f5-bbce-494b-b8de-d8b1a81c9abc/OSTACK_IMG_9cd333f5-bbce-494b-b8de-d8b1a81c9abc.vmdk to [datastore2] devstack-image-cache_base/8c1fe05c-cd55-465a-a01d-4fcf29f67dd5/8c1fe05c-cd55-465a-a01d-4fcf29f67dd5.vmdk. {{(pid=62627) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1481.041851] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-d5f4bfc2-45cd-4d8b-bc4d-9c8becce2fda {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.049218] env[62627]: DEBUG oslo_vmware.api [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Waiting for the task: (returnval){ [ 1481.049218] env[62627]: value = "task-2195579" [ 1481.049218] env[62627]: _type = "Task" [ 1481.049218] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1481.056410] env[62627]: DEBUG oslo_vmware.api [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195579, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.560765] env[62627]: DEBUG oslo_vmware.api [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195579, 'name': MoveVirtualDisk_Task} progress is 24%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.061868] env[62627]: DEBUG oslo_vmware.api [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195579, 'name': MoveVirtualDisk_Task} progress is 46%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.459468] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0e9920a8-a717-4613-a621-3f794ff41e20 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquiring lock "refresh_cache-cdff04c1-ec74-4afa-9f8c-7e1acbe9f336" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1482.459662] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0e9920a8-a717-4613-a621-3f794ff41e20 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquired lock "refresh_cache-cdff04c1-ec74-4afa-9f8c-7e1acbe9f336" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1482.459892] env[62627]: DEBUG nova.network.neutron [None req-0e9920a8-a717-4613-a621-3f794ff41e20 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1482.561353] env[62627]: DEBUG oslo_vmware.api [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195579, 'name': MoveVirtualDisk_Task} progress is 71%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.062595] env[62627]: DEBUG oslo_vmware.api [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195579, 'name': MoveVirtualDisk_Task} progress is 94%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.191521] env[62627]: DEBUG nova.network.neutron [None req-0e9920a8-a717-4613-a621-3f794ff41e20 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Updating instance_info_cache with network_info: [{"id": "238d30ee-1280-409d-89ff-3c112db3c2d4", "address": "fa:16:3e:51:1b:2f", "network": {"id": "da26ed97-a6a7-47f1-b6c0-d1b2f45ad202", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-265963183-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.167", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97c11d791dcf4f94a8e8bda7a6f014cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6eaa481-1f92-4851-b98e-09ed0daad7cb", "external-id": "nsx-vlan-transportzone-636", "segmentation_id": 636, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap238d30ee-12", "ovs_interfaceid": "238d30ee-1280-409d-89ff-3c112db3c2d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1483.561190] env[62627]: DEBUG oslo_vmware.api [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195579, 'name': MoveVirtualDisk_Task} progress is 100%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.694525] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0e9920a8-a717-4613-a621-3f794ff41e20 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Releasing lock "refresh_cache-cdff04c1-ec74-4afa-9f8c-7e1acbe9f336" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1483.695942] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69070818-b527-4b8a-8658-740266118b08 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.705141] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-0e9920a8-a717-4613-a621-3f794ff41e20 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Resuming the VM {{(pid=62627) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1483.705533] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-104f0c78-89f0-4308-8ac7-3c9bd003a1d3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.713060] env[62627]: DEBUG oslo_vmware.api [None req-0e9920a8-a717-4613-a621-3f794ff41e20 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 1483.713060] env[62627]: value = "task-2195580" [ 1483.713060] env[62627]: _type = "Task" [ 1483.713060] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1483.721241] env[62627]: DEBUG oslo_vmware.api [None req-0e9920a8-a717-4613-a621-3f794ff41e20 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195580, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1484.061958] env[62627]: DEBUG oslo_vmware.api [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195579, 'name': MoveVirtualDisk_Task} progress is 100%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1484.223465] env[62627]: DEBUG oslo_vmware.api [None req-0e9920a8-a717-4613-a621-3f794ff41e20 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195580, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1484.562503] env[62627]: DEBUG oslo_vmware.api [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195579, 'name': MoveVirtualDisk_Task} progress is 100%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1484.723981] env[62627]: DEBUG oslo_vmware.api [None req-0e9920a8-a717-4613-a621-3f794ff41e20 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195580, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.066086] env[62627]: DEBUG oslo_vmware.api [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195579, 'name': MoveVirtualDisk_Task, 'duration_secs': 3.613506} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1485.066086] env[62627]: INFO nova.virt.vmwareapi.ds_util [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_9cd333f5-bbce-494b-b8de-d8b1a81c9abc/OSTACK_IMG_9cd333f5-bbce-494b-b8de-d8b1a81c9abc.vmdk to [datastore2] devstack-image-cache_base/8c1fe05c-cd55-465a-a01d-4fcf29f67dd5/8c1fe05c-cd55-465a-a01d-4fcf29f67dd5.vmdk. [ 1485.066346] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Cleaning up location [datastore2] OSTACK_IMG_9cd333f5-bbce-494b-b8de-d8b1a81c9abc {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1485.066346] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_9cd333f5-bbce-494b-b8de-d8b1a81c9abc {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1485.066593] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-28905afb-395c-43ac-982d-686e266cc6ef {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.075936] env[62627]: DEBUG oslo_vmware.api [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Waiting for the task: (returnval){ [ 1485.075936] env[62627]: value = "task-2195581" [ 1485.075936] env[62627]: _type = "Task" [ 1485.075936] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1485.087149] env[62627]: DEBUG oslo_vmware.api [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195581, 'name': DeleteDatastoreFile_Task} progress is 0%. 
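Taken together, the records above and the CopyVirtualDisk_Task that follows describe the image-cache path for the shelved image: the vApp is downloaded into a temporary OSTACK_IMG_* folder, the imported VM is unregistered, the VMDK is moved into devstack-image-cache_base/<image-id>/, the temporary location is deleted, and the cached disk is then copied into the instance folder and attached. A sketch of that ordering; `ds` is a hypothetical datastore helper, and the path layout simply mirrors the log messages.

```python
# Sketch of the cache-then-copy ordering seen in the log.
def cache_and_attach(ds, image_id, instance_uuid, tmp_folder):
    cache_dir = f"devstack-image-cache_base/{image_id}"
    cached_vmdk = f"{cache_dir}/{image_id}.vmdk"
    instance_vmdk = f"{instance_uuid}/{instance_uuid}.vmdk"

    ds.mkdir(cache_dir)                                       # FileManager.MakeDirectory
    ds.move(f"{tmp_folder}/{tmp_folder}.vmdk", cached_vmdk)   # MoveVirtualDisk_Task
    ds.delete(tmp_folder)                                     # DeleteDatastoreFile_Task
    ds.copy(cached_vmdk, instance_vmdk)                       # CopyVirtualDisk_Task
    return instance_vmdk                                      # ReconfigVM_Task then attaches it
```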
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.225623] env[62627]: DEBUG oslo_vmware.api [None req-0e9920a8-a717-4613-a621-3f794ff41e20 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195580, 'name': PowerOnVM_Task, 'duration_secs': 1.435504} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1485.226087] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-0e9920a8-a717-4613-a621-3f794ff41e20 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Resumed the VM {{(pid=62627) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1485.226087] env[62627]: DEBUG nova.compute.manager [None req-0e9920a8-a717-4613-a621-3f794ff41e20 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1485.226839] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff24450d-ffe3-45d6-b2c8-ac86a51f84b6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.585769] env[62627]: DEBUG oslo_vmware.api [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195581, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.313241} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1485.586090] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1485.586231] env[62627]: DEBUG oslo_concurrency.lockutils [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c1fe05c-cd55-465a-a01d-4fcf29f67dd5/8c1fe05c-cd55-465a-a01d-4fcf29f67dd5.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1485.586471] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/8c1fe05c-cd55-465a-a01d-4fcf29f67dd5/8c1fe05c-cd55-465a-a01d-4fcf29f67dd5.vmdk to [datastore2] 77b68106-1abe-426b-89ce-3495776b9233/77b68106-1abe-426b-89ce-3495776b9233.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1485.586722] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-46447c6c-4d3d-48a8-a929-a7ebe9b67ff3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.593343] env[62627]: DEBUG oslo_vmware.api [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 
tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Waiting for the task: (returnval){ [ 1485.593343] env[62627]: value = "task-2195582" [ 1485.593343] env[62627]: _type = "Task" [ 1485.593343] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1485.600562] env[62627]: DEBUG oslo_vmware.api [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195582, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.103952] env[62627]: DEBUG oslo_vmware.api [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195582, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.605358] env[62627]: DEBUG oslo_vmware.api [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195582, 'name': CopyVirtualDisk_Task} progress is 12%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.830368] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b715b43a-d305-4968-9299-843754679ba6 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquiring lock "cdff04c1-ec74-4afa-9f8c-7e1acbe9f336" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1486.830698] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b715b43a-d305-4968-9299-843754679ba6 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lock "cdff04c1-ec74-4afa-9f8c-7e1acbe9f336" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1486.830879] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b715b43a-d305-4968-9299-843754679ba6 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquiring lock "cdff04c1-ec74-4afa-9f8c-7e1acbe9f336-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1486.831084] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b715b43a-d305-4968-9299-843754679ba6 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lock "cdff04c1-ec74-4afa-9f8c-7e1acbe9f336-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1486.831262] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b715b43a-d305-4968-9299-843754679ba6 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] 
Lock "cdff04c1-ec74-4afa-9f8c-7e1acbe9f336-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1486.833486] env[62627]: INFO nova.compute.manager [None req-b715b43a-d305-4968-9299-843754679ba6 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Terminating instance [ 1487.108518] env[62627]: DEBUG oslo_vmware.api [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195582, 'name': CopyVirtualDisk_Task} progress is 35%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.337021] env[62627]: DEBUG nova.compute.manager [None req-b715b43a-d305-4968-9299-843754679ba6 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Start destroying the instance on the hypervisor. {{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1487.337357] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-b715b43a-d305-4968-9299-843754679ba6 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1487.338100] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15a64878-5c1c-41d9-9a00-44ba16da3e92 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.347888] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-b715b43a-d305-4968-9299-843754679ba6 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1487.348178] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1a734e4c-1775-499a-9ed8-10f2dc504c30 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.357127] env[62627]: DEBUG oslo_vmware.api [None req-b715b43a-d305-4968-9299-843754679ba6 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 1487.357127] env[62627]: value = "task-2195583" [ 1487.357127] env[62627]: _type = "Task" [ 1487.357127] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1487.368365] env[62627]: DEBUG oslo_vmware.api [None req-b715b43a-d305-4968-9299-843754679ba6 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195583, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.608751] env[62627]: DEBUG oslo_vmware.api [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195582, 'name': CopyVirtualDisk_Task} progress is 57%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.871141] env[62627]: DEBUG oslo_vmware.api [None req-b715b43a-d305-4968-9299-843754679ba6 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195583, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1488.109407] env[62627]: DEBUG oslo_vmware.api [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195582, 'name': CopyVirtualDisk_Task} progress is 80%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1488.370476] env[62627]: DEBUG oslo_vmware.api [None req-b715b43a-d305-4968-9299-843754679ba6 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195583, 'name': PowerOffVM_Task, 'duration_secs': 0.95865} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1488.370798] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-b715b43a-d305-4968-9299-843754679ba6 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1488.370951] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-b715b43a-d305-4968-9299-843754679ba6 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1488.371225] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-526572f6-c261-4925-9cdd-27bef1a63954 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.494442] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-b715b43a-d305-4968-9299-843754679ba6 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1488.494757] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-b715b43a-d305-4968-9299-843754679ba6 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1488.494856] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-b715b43a-d305-4968-9299-843754679ba6 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] 
Deleting the datastore file [datastore2] cdff04c1-ec74-4afa-9f8c-7e1acbe9f336 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1488.495126] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-62c8ceaf-3c13-44a7-80d1-d1e21e4b596a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.501386] env[62627]: DEBUG oslo_vmware.api [None req-b715b43a-d305-4968-9299-843754679ba6 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for the task: (returnval){ [ 1488.501386] env[62627]: value = "task-2195585" [ 1488.501386] env[62627]: _type = "Task" [ 1488.501386] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1488.508785] env[62627]: DEBUG oslo_vmware.api [None req-b715b43a-d305-4968-9299-843754679ba6 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195585, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1488.607844] env[62627]: DEBUG oslo_vmware.api [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195582, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.913463} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1488.608107] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/8c1fe05c-cd55-465a-a01d-4fcf29f67dd5/8c1fe05c-cd55-465a-a01d-4fcf29f67dd5.vmdk to [datastore2] 77b68106-1abe-426b-89ce-3495776b9233/77b68106-1abe-426b-89ce-3495776b9233.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1488.608886] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e6fd0d4-31c5-4778-b195-793de86e3972 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.631439] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Reconfiguring VM instance instance-00000076 to attach disk [datastore2] 77b68106-1abe-426b-89ce-3495776b9233/77b68106-1abe-426b-89ce-3495776b9233.vmdk or device None with type streamOptimized {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1488.631730] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ff8680b2-3c07-4f82-9891-e27a358326b8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.652523] env[62627]: DEBUG oslo_vmware.api [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Waiting for the task: (returnval){ [ 1488.652523] 
env[62627]: value = "task-2195586" [ 1488.652523] env[62627]: _type = "Task" [ 1488.652523] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1488.660265] env[62627]: DEBUG oslo_vmware.api [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195586, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1489.011510] env[62627]: DEBUG oslo_vmware.api [None req-b715b43a-d305-4968-9299-843754679ba6 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Task: {'id': task-2195585, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.144192} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1489.011785] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-b715b43a-d305-4968-9299-843754679ba6 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1489.011971] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-b715b43a-d305-4968-9299-843754679ba6 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1489.012159] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-b715b43a-d305-4968-9299-843754679ba6 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1489.012334] env[62627]: INFO nova.compute.manager [None req-b715b43a-d305-4968-9299-843754679ba6 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Took 1.68 seconds to destroy the instance on the hypervisor. [ 1489.012572] env[62627]: DEBUG oslo.service.loopingcall [None req-b715b43a-d305-4968-9299-843754679ba6 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1489.012758] env[62627]: DEBUG nova.compute.manager [-] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1489.012857] env[62627]: DEBUG nova.network.neutron [-] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1489.163232] env[62627]: DEBUG oslo_vmware.api [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195586, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1489.506861] env[62627]: DEBUG nova.compute.manager [req-b2852a7d-7126-462e-b03d-aa4de9bc4e2f req-b665ca5a-0ea0-4b69-a564-7d43e39f51d3 service nova] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Received event network-vif-deleted-238d30ee-1280-409d-89ff-3c112db3c2d4 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1489.506861] env[62627]: INFO nova.compute.manager [req-b2852a7d-7126-462e-b03d-aa4de9bc4e2f req-b665ca5a-0ea0-4b69-a564-7d43e39f51d3 service nova] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Neutron deleted interface 238d30ee-1280-409d-89ff-3c112db3c2d4; detaching it from the instance and deleting it from the info cache [ 1489.506861] env[62627]: DEBUG nova.network.neutron [req-b2852a7d-7126-462e-b03d-aa4de9bc4e2f req-b665ca5a-0ea0-4b69-a564-7d43e39f51d3 service nova] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1489.667223] env[62627]: DEBUG oslo_vmware.api [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195586, 'name': ReconfigVM_Task, 'duration_secs': 0.536352} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1489.667653] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Reconfigured VM instance instance-00000076 to attach disk [datastore2] 77b68106-1abe-426b-89ce-3495776b9233/77b68106-1abe-426b-89ce-3495776b9233.vmdk or device None with type streamOptimized {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1489.669561] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'device_name': '/dev/sda', 'encrypted': False, 'encryption_secret_uuid': None, 'encryption_options': None, 'boot_index': 0, 'encryption_format': None, 'guest_format': None, 'disk_bus': None, 'device_type': 'disk', 'size': 0, 'image_id': 'd5497cf1-16e8-478a-a834-e9289b1751b7'}], 'ephemerals': [], 'block_device_mapping': [{'delete_on_termination': False, 'attachment_id': '136f0435-0f73-45f5-9255-7a30ce8a0771', 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447862', 'volume_id': 'b3d23526-ef45-4e03-a5a6-5df1c7626e46', 'name': 'volume-b3d23526-ef45-4e03-a5a6-5df1c7626e46', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '77b68106-1abe-426b-89ce-3495776b9233', 'attached_at': '', 'detached_at': '', 'volume_id': 'b3d23526-ef45-4e03-a5a6-5df1c7626e46', 'serial': 'b3d23526-ef45-4e03-a5a6-5df1c7626e46'}, 'boot_index': None, 'mount_device': '/dev/sdb', 'disk_bus': None, 'device_type': None, 'guest_format': None, 'volume_type': None}], 'swap': None} {{(pid=62627) spawn 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1489.669896] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Volume attach. Driver type: vmdk {{(pid=62627) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1489.670232] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447862', 'volume_id': 'b3d23526-ef45-4e03-a5a6-5df1c7626e46', 'name': 'volume-b3d23526-ef45-4e03-a5a6-5df1c7626e46', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '77b68106-1abe-426b-89ce-3495776b9233', 'attached_at': '', 'detached_at': '', 'volume_id': 'b3d23526-ef45-4e03-a5a6-5df1c7626e46', 'serial': 'b3d23526-ef45-4e03-a5a6-5df1c7626e46'} {{(pid=62627) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1489.671441] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89a0e46f-ae32-49c7-bb9a-281d59bc670b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.696399] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caf83113-615f-4ec4-807a-b14617f2a61a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.741044] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Reconfiguring VM instance instance-00000076 to attach disk [datastore2] volume-b3d23526-ef45-4e03-a5a6-5df1c7626e46/volume-b3d23526-ef45-4e03-a5a6-5df1c7626e46.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1489.741272] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-31eb1d77-c798-4f66-8530-dacff52de9ea {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.771612] env[62627]: DEBUG oslo_vmware.api [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Waiting for the task: (returnval){ [ 1489.771612] env[62627]: value = "task-2195587" [ 1489.771612] env[62627]: _type = "Task" [ 1489.771612] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1489.783564] env[62627]: DEBUG oslo_vmware.api [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195587, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1489.990025] env[62627]: DEBUG nova.network.neutron [-] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1490.008637] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-548edbe0-d15e-49bd-bb19-8c817d2b3994 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.019073] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-406a8fdf-bb21-40f6-9d8d-14297e5857d2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.044208] env[62627]: DEBUG nova.compute.manager [req-b2852a7d-7126-462e-b03d-aa4de9bc4e2f req-b665ca5a-0ea0-4b69-a564-7d43e39f51d3 service nova] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Detach interface failed, port_id=238d30ee-1280-409d-89ff-3c112db3c2d4, reason: Instance cdff04c1-ec74-4afa-9f8c-7e1acbe9f336 could not be found. {{(pid=62627) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1490.281536] env[62627]: DEBUG oslo_vmware.api [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195587, 'name': ReconfigVM_Task, 'duration_secs': 0.314887} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1490.281806] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Reconfigured VM instance instance-00000076 to attach disk [datastore2] volume-b3d23526-ef45-4e03-a5a6-5df1c7626e46/volume-b3d23526-ef45-4e03-a5a6-5df1c7626e46.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1490.286373] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-95ad7576-57a4-439e-b301-315035f48c7a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.300994] env[62627]: DEBUG oslo_vmware.api [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Waiting for the task: (returnval){ [ 1490.300994] env[62627]: value = "task-2195588" [ 1490.300994] env[62627]: _type = "Task" [ 1490.300994] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1490.308729] env[62627]: DEBUG oslo_vmware.api [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195588, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1490.493046] env[62627]: INFO nova.compute.manager [-] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Took 1.48 seconds to deallocate network for instance. [ 1490.810968] env[62627]: DEBUG oslo_vmware.api [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195588, 'name': ReconfigVM_Task, 'duration_secs': 0.137472} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1490.811265] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447862', 'volume_id': 'b3d23526-ef45-4e03-a5a6-5df1c7626e46', 'name': 'volume-b3d23526-ef45-4e03-a5a6-5df1c7626e46', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '77b68106-1abe-426b-89ce-3495776b9233', 'attached_at': '', 'detached_at': '', 'volume_id': 'b3d23526-ef45-4e03-a5a6-5df1c7626e46', 'serial': 'b3d23526-ef45-4e03-a5a6-5df1c7626e46'} {{(pid=62627) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1490.811978] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7233d2a5-daec-4220-bc4a-9dc74066a623 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.819033] env[62627]: DEBUG oslo_vmware.api [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Waiting for the task: (returnval){ [ 1490.819033] env[62627]: value = "task-2195589" [ 1490.819033] env[62627]: _type = "Task" [ 1490.819033] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1490.826723] env[62627]: DEBUG oslo_vmware.api [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195589, 'name': Rename_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1490.999228] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b715b43a-d305-4968-9299-843754679ba6 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1490.999613] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b715b43a-d305-4968-9299-843754679ba6 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1490.999730] env[62627]: DEBUG nova.objects.instance [None req-b715b43a-d305-4968-9299-843754679ba6 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lazy-loading 'resources' on Instance uuid cdff04c1-ec74-4afa-9f8c-7e1acbe9f336 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1491.328699] env[62627]: DEBUG oslo_vmware.api [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195589, 'name': Rename_Task, 'duration_secs': 0.149349} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1491.329010] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1491.329261] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9a7b22c5-5d4e-4b23-a295-7e9f57bbbdf5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.335565] env[62627]: DEBUG oslo_vmware.api [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Waiting for the task: (returnval){ [ 1491.335565] env[62627]: value = "task-2195590" [ 1491.335565] env[62627]: _type = "Task" [ 1491.335565] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1491.344106] env[62627]: DEBUG oslo_vmware.api [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195590, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1491.554255] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdff56f0-9973-4d85-b409-09d2e661767e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.561380] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28c09392-a17a-4375-9a1a-baa46156bd4b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.592046] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d141309b-5fd6-42f6-80d6-1bb9143dbc49 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.599254] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfcaf0dc-071b-4147-9fda-19e013a26beb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.612034] env[62627]: DEBUG nova.compute.provider_tree [None req-b715b43a-d305-4968-9299-843754679ba6 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1491.846216] env[62627]: DEBUG oslo_vmware.api [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195590, 'name': PowerOnVM_Task, 'duration_secs': 0.427707} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1491.846479] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1491.938633] env[62627]: DEBUG nova.compute.manager [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1491.939624] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-130e6e31-99ea-4866-8382-c37005ece202 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.114871] env[62627]: DEBUG nova.scheduler.client.report [None req-b715b43a-d305-4968-9299-843754679ba6 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1492.455854] env[62627]: DEBUG oslo_concurrency.lockutils [None req-30df1e2c-11a1-4e7d-a32e-7d096d587467 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Lock "77b68106-1abe-426b-89ce-3495776b9233" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 29.337s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1492.619241] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b715b43a-d305-4968-9299-843754679ba6 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.620s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1492.641567] env[62627]: INFO nova.scheduler.client.report [None req-b715b43a-d305-4968-9299-843754679ba6 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Deleted allocations for instance cdff04c1-ec74-4afa-9f8c-7e1acbe9f336 [ 1493.150441] env[62627]: DEBUG oslo_concurrency.lockutils [None req-b715b43a-d305-4968-9299-843754679ba6 tempest-ServerActionsTestJSON-1886905081 tempest-ServerActionsTestJSON-1886905081-project-member] Lock "cdff04c1-ec74-4afa-9f8c-7e1acbe9f336" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.320s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1495.966503] env[62627]: DEBUG oslo_concurrency.lockutils [None 
req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Acquiring lock "de1345ab-57f6-4d2c-918e-e9cdcdc16b50" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1495.966802] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Lock "de1345ab-57f6-4d2c-918e-e9cdcdc16b50" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1496.135180] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Acquiring lock "a66936c9-e164-4431-b5e4-40cfbdb1ca8e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1496.135656] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Lock "a66936c9-e164-4431-b5e4-40cfbdb1ca8e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1496.469183] env[62627]: DEBUG nova.compute.manager [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: de1345ab-57f6-4d2c-918e-e9cdcdc16b50] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1496.638715] env[62627]: DEBUG nova.compute.manager [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: a66936c9-e164-4431-b5e4-40cfbdb1ca8e] Starting instance... 
{{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1496.990098] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1496.990352] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1496.991772] env[62627]: INFO nova.compute.claims [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: de1345ab-57f6-4d2c-918e-e9cdcdc16b50] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1497.157449] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1498.057690] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b79b11d4-aa75-4003-a25c-9726d523f3de {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.065204] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dbece52-b42b-43a3-8e91-46d84dd6420b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.095042] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0586c39d-bae8-4cff-9dc5-6f9c797853d8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.101581] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7db849bb-5a3e-4b5c-98c3-8a91b1c66645 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.113934] env[62627]: DEBUG nova.compute.provider_tree [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1498.617254] env[62627]: DEBUG nova.scheduler.client.report [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1499.122534] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.132s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1499.123062] env[62627]: DEBUG nova.compute.manager [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: de1345ab-57f6-4d2c-918e-e9cdcdc16b50] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1499.125959] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.969s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1499.127367] env[62627]: INFO nova.compute.claims [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: a66936c9-e164-4431-b5e4-40cfbdb1ca8e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1499.631805] env[62627]: DEBUG nova.compute.utils [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1499.635137] env[62627]: DEBUG nova.compute.manager [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: de1345ab-57f6-4d2c-918e-e9cdcdc16b50] Not allocating networking since 'none' was specified. {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1500.136330] env[62627]: DEBUG nova.compute.manager [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: de1345ab-57f6-4d2c-918e-e9cdcdc16b50] Start building block device mappings for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1500.195064] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4bb1c92-42ac-4921-ad00-2a62cbbd8fbf {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.202051] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3245d275-1fbd-4484-9d44-d135966ed739 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.231857] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4363664-01f4-4a52-bea1-7629a3a97c54 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.239294] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d28ed0e0-eb44-41c0-92ea-868f4ed5800f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.251948] env[62627]: DEBUG nova.compute.provider_tree [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1500.755241] env[62627]: DEBUG nova.scheduler.client.report [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1501.148307] env[62627]: DEBUG nova.compute.manager [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: de1345ab-57f6-4d2c-918e-e9cdcdc16b50] Start spawning the instance on the hypervisor. 
{{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1501.174591] env[62627]: DEBUG nova.virt.hardware [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1501.174892] env[62627]: DEBUG nova.virt.hardware [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1501.175091] env[62627]: DEBUG nova.virt.hardware [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1501.175322] env[62627]: DEBUG nova.virt.hardware [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1501.175508] env[62627]: DEBUG nova.virt.hardware [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1501.175722] env[62627]: DEBUG nova.virt.hardware [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1501.175986] env[62627]: DEBUG nova.virt.hardware [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1501.176207] env[62627]: DEBUG nova.virt.hardware [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1501.176416] env[62627]: DEBUG nova.virt.hardware [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d 
tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1501.176616] env[62627]: DEBUG nova.virt.hardware [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1501.176820] env[62627]: DEBUG nova.virt.hardware [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1501.177824] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09057fba-0898-4a8c-9ba7-f2fb419b4df9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.186104] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d549f28e-42d2-45ed-96e0-b730fcb14cb4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.198947] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: de1345ab-57f6-4d2c-918e-e9cdcdc16b50] Instance VIF info [] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1501.204428] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Creating folder: Project (979fcbe7f452472a851cbb84db7c835b). Parent ref: group-v447541. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1501.204669] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d8e7ab99-28e2-4bd7-a519-ec31478bc2f4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.214626] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Created folder: Project (979fcbe7f452472a851cbb84db7c835b) in parent group-v447541. [ 1501.214794] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Creating folder: Instances. Parent ref: group-v447865. {{(pid=62627) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1501.214991] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0f457f8d-77f5-4025-a053-836ba6f56b66 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.223342] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Created folder: Instances in parent group-v447865. 
[ 1501.223555] env[62627]: DEBUG oslo.service.loopingcall [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1501.223730] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: de1345ab-57f6-4d2c-918e-e9cdcdc16b50] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1501.223905] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d95072b4-2acf-4242-834d-97176c99f69e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.238825] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1501.238825] env[62627]: value = "task-2195593" [ 1501.238825] env[62627]: _type = "Task" [ 1501.238825] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1501.245371] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195593, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1501.259170] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.133s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1501.259666] env[62627]: DEBUG nova.compute.manager [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: a66936c9-e164-4431-b5e4-40cfbdb1ca8e] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1501.748941] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195593, 'name': CreateVM_Task, 'duration_secs': 0.251578} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1501.749214] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: de1345ab-57f6-4d2c-918e-e9cdcdc16b50] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1501.749715] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1501.749876] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1501.750246] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1501.750537] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a4639258-1f17-4e83-901f-04609837a100 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.754843] env[62627]: DEBUG oslo_vmware.api [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Waiting for the task: (returnval){ [ 1501.754843] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]520118b6-f843-eba3-cac8-79719612f4f9" [ 1501.754843] env[62627]: _type = "Task" [ 1501.754843] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1501.761988] env[62627]: DEBUG oslo_vmware.api [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]520118b6-f843-eba3-cac8-79719612f4f9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1501.764198] env[62627]: DEBUG nova.compute.utils [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1501.765483] env[62627]: DEBUG nova.compute.manager [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: a66936c9-e164-4431-b5e4-40cfbdb1ca8e] Not allocating networking since 'none' was specified. 
{{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1502.265635] env[62627]: DEBUG oslo_vmware.api [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]520118b6-f843-eba3-cac8-79719612f4f9, 'name': SearchDatastore_Task, 'duration_secs': 0.010403} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1502.266022] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1502.266264] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: de1345ab-57f6-4d2c-918e-e9cdcdc16b50] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1502.266554] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1502.266723] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1502.266931] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1502.267467] env[62627]: DEBUG nova.compute.manager [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: a66936c9-e164-4431-b5e4-40cfbdb1ca8e] Start building block device mappings for instance. 
{{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1502.269904] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-589af645-e6bf-48a9-b68b-a6411cb1bdc9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.279080] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1502.279080] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1502.279247] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d8391da-aa32-43a6-833e-f284820e5dbd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.283849] env[62627]: DEBUG oslo_vmware.api [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Waiting for the task: (returnval){ [ 1502.283849] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]522496b9-0540-a796-6d69-5ca83e94c589" [ 1502.283849] env[62627]: _type = "Task" [ 1502.283849] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1502.291117] env[62627]: DEBUG oslo_vmware.api [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]522496b9-0540-a796-6d69-5ca83e94c589, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1502.793557] env[62627]: DEBUG oslo_vmware.api [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]522496b9-0540-a796-6d69-5ca83e94c589, 'name': SearchDatastore_Task, 'duration_secs': 0.007906} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1502.794315] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee76ebad-7ae2-4b53-8ac3-d258d643a3d8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.799701] env[62627]: DEBUG oslo_vmware.api [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Waiting for the task: (returnval){ [ 1502.799701] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]522e65d8-bf85-989e-82ea-ab7cdbd18f96" [ 1502.799701] env[62627]: _type = "Task" [ 1502.799701] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1502.807060] env[62627]: DEBUG oslo_vmware.api [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]522e65d8-bf85-989e-82ea-ab7cdbd18f96, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1502.984714] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7733d146-bf33-4735-a21a-7e7beaeb4453 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Acquiring lock "54b2cc24-6652-4cc4-93cc-b059f8a9be86" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1502.984714] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7733d146-bf33-4735-a21a-7e7beaeb4453 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Lock "54b2cc24-6652-4cc4-93cc-b059f8a9be86" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1503.277939] env[62627]: DEBUG nova.compute.manager [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: a66936c9-e164-4431-b5e4-40cfbdb1ca8e] Start spawning the instance on the hypervisor. {{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1503.305683] env[62627]: DEBUG nova.virt.hardware [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1503.305931] env[62627]: DEBUG nova.virt.hardware [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1503.306110] env[62627]: DEBUG nova.virt.hardware [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1503.306301] env[62627]: DEBUG nova.virt.hardware [None 
req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1503.306448] env[62627]: DEBUG nova.virt.hardware [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1503.306595] env[62627]: DEBUG nova.virt.hardware [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1503.306799] env[62627]: DEBUG nova.virt.hardware [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1503.306960] env[62627]: DEBUG nova.virt.hardware [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1503.307142] env[62627]: DEBUG nova.virt.hardware [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1503.307305] env[62627]: DEBUG nova.virt.hardware [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1503.307474] env[62627]: DEBUG nova.virt.hardware [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1503.308238] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ffb2c31-6b90-4ddd-9d60-0283b2906916 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.316447] env[62627]: DEBUG oslo_vmware.api [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]522e65d8-bf85-989e-82ea-ab7cdbd18f96, 'name': SearchDatastore_Task, 'duration_secs': 0.009056} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1503.318043] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1503.318296] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] de1345ab-57f6-4d2c-918e-e9cdcdc16b50/de1345ab-57f6-4d2c-918e-e9cdcdc16b50.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1503.318551] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cfc57e3b-a61d-4273-aeda-6e551de40d0f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.321085] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ae8a8e5-2307-4a1e-bbbe-4a850119fb91 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.334491] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: a66936c9-e164-4431-b5e4-40cfbdb1ca8e] Instance VIF info [] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1503.339811] env[62627]: DEBUG oslo.service.loopingcall [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1503.340910] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a66936c9-e164-4431-b5e4-40cfbdb1ca8e] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1503.341213] env[62627]: DEBUG oslo_vmware.api [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Waiting for the task: (returnval){ [ 1503.341213] env[62627]: value = "task-2195594" [ 1503.341213] env[62627]: _type = "Task" [ 1503.341213] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1503.341382] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-21f4500d-ff03-4359-b25d-0432c3be3e3d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.360543] env[62627]: DEBUG oslo_vmware.api [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Task: {'id': task-2195594, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.361671] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1503.361671] env[62627]: value = "task-2195595" [ 1503.361671] env[62627]: _type = "Task" [ 1503.361671] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1503.371259] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195595, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.487868] env[62627]: DEBUG nova.compute.utils [None req-7733d146-bf33-4735-a21a-7e7beaeb4453 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1503.862693] env[62627]: DEBUG oslo_vmware.api [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Task: {'id': task-2195594, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.438706} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1503.865589] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] de1345ab-57f6-4d2c-918e-e9cdcdc16b50/de1345ab-57f6-4d2c-918e-e9cdcdc16b50.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1503.865813] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: de1345ab-57f6-4d2c-918e-e9cdcdc16b50] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1503.866079] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1e063c45-affa-4840-b3eb-081cf6e1ee36 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.872644] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195595, 'name': CreateVM_Task, 'duration_secs': 0.378604} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1503.873667] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a66936c9-e164-4431-b5e4-40cfbdb1ca8e] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1503.873965] env[62627]: DEBUG oslo_vmware.api [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Waiting for the task: (returnval){ [ 1503.873965] env[62627]: value = "task-2195596" [ 1503.873965] env[62627]: _type = "Task" [ 1503.873965] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1503.874338] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1503.874491] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1503.874815] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1503.875155] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8d41ba9f-0fff-42cf-9138-c092900a43a5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.884360] env[62627]: DEBUG oslo_vmware.api [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Task: {'id': task-2195596, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.885318] env[62627]: DEBUG oslo_vmware.api [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Waiting for the task: (returnval){ [ 1503.885318] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52933f3e-3700-d48c-4516-b2887f630017" [ 1503.885318] env[62627]: _type = "Task" [ 1503.885318] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1503.893576] env[62627]: DEBUG oslo_vmware.api [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52933f3e-3700-d48c-4516-b2887f630017, 'name': SearchDatastore_Task, 'duration_secs': 0.007871} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1503.893870] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1503.894129] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: a66936c9-e164-4431-b5e4-40cfbdb1ca8e] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1503.894368] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1503.894541] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1503.894735] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1503.894975] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4cfae6f8-6594-407e-8886-23bc1f07ef03 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.901651] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1503.901834] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1503.902538] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c076cf81-3da1-4a66-8f48-3727c24e763d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.907659] env[62627]: DEBUG oslo_vmware.api [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Waiting for the task: (returnval){ [ 1503.907659] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52242c6e-0d84-c44d-28c1-01615be71805" [ 1503.907659] env[62627]: _type = "Task" [ 1503.907659] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1503.915139] env[62627]: DEBUG oslo_vmware.api [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52242c6e-0d84-c44d-28c1-01615be71805, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.991356] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7733d146-bf33-4735-a21a-7e7beaeb4453 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Lock "54b2cc24-6652-4cc4-93cc-b059f8a9be86" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1504.385233] env[62627]: DEBUG oslo_vmware.api [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Task: {'id': task-2195596, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.057231} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1504.385597] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: de1345ab-57f6-4d2c-918e-e9cdcdc16b50] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1504.386264] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86f692e7-2b06-4a9d-a136-b2d955e38026 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.405665] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: de1345ab-57f6-4d2c-918e-e9cdcdc16b50] Reconfiguring VM instance instance-0000007a to attach disk [datastore2] de1345ab-57f6-4d2c-918e-e9cdcdc16b50/de1345ab-57f6-4d2c-918e-e9cdcdc16b50.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1504.405907] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1a92f94c-5a15-4e30-801b-79dedd3ac063 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.427630] env[62627]: DEBUG oslo_vmware.api [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52242c6e-0d84-c44d-28c1-01615be71805, 'name': SearchDatastore_Task, 'duration_secs': 0.007269} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1504.429261] env[62627]: DEBUG oslo_vmware.api [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Waiting for the task: (returnval){ [ 1504.429261] env[62627]: value = "task-2195597" [ 1504.429261] env[62627]: _type = "Task" [ 1504.429261] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1504.429453] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c6b84822-cff3-4e20-befd-ad892cb944c3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.439946] env[62627]: DEBUG oslo_vmware.api [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Task: {'id': task-2195597, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1504.441086] env[62627]: DEBUG oslo_vmware.api [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Waiting for the task: (returnval){ [ 1504.441086] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]526df722-9fa9-2bad-6e0d-ef175f72c567" [ 1504.441086] env[62627]: _type = "Task" [ 1504.441086] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1504.447809] env[62627]: DEBUG oslo_vmware.api [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]526df722-9fa9-2bad-6e0d-ef175f72c567, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1504.941140] env[62627]: DEBUG oslo_vmware.api [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Task: {'id': task-2195597, 'name': ReconfigVM_Task, 'duration_secs': 0.367969} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1504.941491] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: de1345ab-57f6-4d2c-918e-e9cdcdc16b50] Reconfigured VM instance instance-0000007a to attach disk [datastore2] de1345ab-57f6-4d2c-918e-e9cdcdc16b50/de1345ab-57f6-4d2c-918e-e9cdcdc16b50.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1504.944616] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bfed0022-d66f-4bba-ad8d-3baeeab816ed {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.951090] env[62627]: DEBUG oslo_vmware.api [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]526df722-9fa9-2bad-6e0d-ef175f72c567, 'name': SearchDatastore_Task, 'duration_secs': 0.008854} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1504.952136] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1504.952382] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] a66936c9-e164-4431-b5e4-40cfbdb1ca8e/a66936c9-e164-4431-b5e4-40cfbdb1ca8e.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1504.952664] env[62627]: DEBUG oslo_vmware.api [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Waiting for the task: (returnval){ [ 1504.952664] env[62627]: value = "task-2195598" [ 1504.952664] env[62627]: _type = "Task" [ 1504.952664] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1504.952831] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-de0c124b-ec91-4189-91ea-cae0a4e4f3c3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.961387] env[62627]: DEBUG oslo_vmware.api [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Task: {'id': task-2195598, 'name': Rename_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1504.962465] env[62627]: DEBUG oslo_vmware.api [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Waiting for the task: (returnval){ [ 1504.962465] env[62627]: value = "task-2195599" [ 1504.962465] env[62627]: _type = "Task" [ 1504.962465] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1504.969311] env[62627]: DEBUG oslo_vmware.api [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Task: {'id': task-2195599, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1505.063517] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7733d146-bf33-4735-a21a-7e7beaeb4453 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Acquiring lock "54b2cc24-6652-4cc4-93cc-b059f8a9be86" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1505.063749] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7733d146-bf33-4735-a21a-7e7beaeb4453 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Lock "54b2cc24-6652-4cc4-93cc-b059f8a9be86" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1505.064067] env[62627]: INFO nova.compute.manager [None req-7733d146-bf33-4735-a21a-7e7beaeb4453 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Attaching volume 34258b00-8c9e-4ec9-8c4d-c99d0551604a to /dev/sdb [ 1505.094630] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e76b2ac-6389-425f-a1e4-6e58784993e3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.101519] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50de58fb-4a59-4abb-8083-b33b0aa79df3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.115303] env[62627]: DEBUG nova.virt.block_device [None req-7733d146-bf33-4735-a21a-7e7beaeb4453 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Updating existing 
volume attachment record: ad012c51-d0a5-4f74-8c94-590e5aff4e58 {{(pid=62627) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1505.464277] env[62627]: DEBUG oslo_vmware.api [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Task: {'id': task-2195598, 'name': Rename_Task, 'duration_secs': 0.136456} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1505.467241] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: de1345ab-57f6-4d2c-918e-e9cdcdc16b50] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1505.467488] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3c7406d2-1cb0-4401-9215-00c1cf85d4be {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.473566] env[62627]: DEBUG oslo_vmware.api [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Task: {'id': task-2195599, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.434979} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1505.474678] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] a66936c9-e164-4431-b5e4-40cfbdb1ca8e/a66936c9-e164-4431-b5e4-40cfbdb1ca8e.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1505.474896] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: a66936c9-e164-4431-b5e4-40cfbdb1ca8e] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1505.475222] env[62627]: DEBUG oslo_vmware.api [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Waiting for the task: (returnval){ [ 1505.475222] env[62627]: value = "task-2195603" [ 1505.475222] env[62627]: _type = "Task" [ 1505.475222] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1505.475395] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-267322f9-6a19-43c5-8bb8-80a6da95547d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.484767] env[62627]: DEBUG oslo_vmware.api [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Task: {'id': task-2195603, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1505.485836] env[62627]: DEBUG oslo_vmware.api [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Waiting for the task: (returnval){ [ 1505.485836] env[62627]: value = "task-2195604" [ 1505.485836] env[62627]: _type = "Task" [ 1505.485836] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1505.493508] env[62627]: DEBUG oslo_vmware.api [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Task: {'id': task-2195604, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1505.987408] env[62627]: DEBUG oslo_vmware.api [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Task: {'id': task-2195603, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1505.994495] env[62627]: DEBUG oslo_vmware.api [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Task: {'id': task-2195604, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069309} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1505.994737] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: a66936c9-e164-4431-b5e4-40cfbdb1ca8e] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1505.995521] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff7010ac-887c-499d-8d9b-eacfa077e8b8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.015188] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: a66936c9-e164-4431-b5e4-40cfbdb1ca8e] Reconfiguring VM instance instance-0000007b to attach disk [datastore2] a66936c9-e164-4431-b5e4-40cfbdb1ca8e/a66936c9-e164-4431-b5e4-40cfbdb1ca8e.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1506.015413] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ed03312b-eec9-4b06-bb22-a8163f7173c5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.033948] env[62627]: DEBUG oslo_vmware.api [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Waiting for the task: (returnval){ [ 1506.033948] env[62627]: value = "task-2195605" [ 1506.033948] env[62627]: _type = "Task" [ 1506.033948] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1506.041306] env[62627]: DEBUG oslo_vmware.api [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Task: {'id': task-2195605, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1506.487711] env[62627]: DEBUG oslo_vmware.api [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Task: {'id': task-2195603, 'name': PowerOnVM_Task, 'duration_secs': 0.616965} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1506.488118] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: de1345ab-57f6-4d2c-918e-e9cdcdc16b50] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1506.488191] env[62627]: INFO nova.compute.manager [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: de1345ab-57f6-4d2c-918e-e9cdcdc16b50] Took 5.34 seconds to spawn the instance on the hypervisor. [ 1506.488373] env[62627]: DEBUG nova.compute.manager [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: de1345ab-57f6-4d2c-918e-e9cdcdc16b50] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1506.489177] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96eb00c0-a349-4c64-95db-2d59f28605a8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.543318] env[62627]: DEBUG oslo_vmware.api [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Task: {'id': task-2195605, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1507.006821] env[62627]: INFO nova.compute.manager [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: de1345ab-57f6-4d2c-918e-e9cdcdc16b50] Took 10.03 seconds to build instance. [ 1507.044490] env[62627]: DEBUG oslo_vmware.api [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Task: {'id': task-2195605, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1507.509185] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d0e9b0d3-bc39-47cd-a49e-b6fad6a5107d tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Lock "de1345ab-57f6-4d2c-918e-e9cdcdc16b50" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 11.542s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1507.544211] env[62627]: DEBUG oslo_vmware.api [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Task: {'id': task-2195605, 'name': ReconfigVM_Task, 'duration_secs': 1.10033} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1507.544424] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: a66936c9-e164-4431-b5e4-40cfbdb1ca8e] Reconfigured VM instance instance-0000007b to attach disk [datastore2] a66936c9-e164-4431-b5e4-40cfbdb1ca8e/a66936c9-e164-4431-b5e4-40cfbdb1ca8e.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1507.545043] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-10219151-c9c0-40e5-9307-f9701ccc83e4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.550405] env[62627]: DEBUG oslo_vmware.api [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Waiting for the task: (returnval){ [ 1507.550405] env[62627]: value = "task-2195607" [ 1507.550405] env[62627]: _type = "Task" [ 1507.550405] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1507.558577] env[62627]: DEBUG oslo_vmware.api [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Task: {'id': task-2195607, 'name': Rename_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1508.060667] env[62627]: DEBUG oslo_vmware.api [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Task: {'id': task-2195607, 'name': Rename_Task, 'duration_secs': 0.12158} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1508.060952] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: a66936c9-e164-4431-b5e4-40cfbdb1ca8e] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1508.061213] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1b47e2f6-9577-40e1-b766-a949f3c52de8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.067116] env[62627]: DEBUG oslo_vmware.api [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Waiting for the task: (returnval){ [ 1508.067116] env[62627]: value = "task-2195608" [ 1508.067116] env[62627]: _type = "Task" [ 1508.067116] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1508.074171] env[62627]: DEBUG oslo_vmware.api [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Task: {'id': task-2195608, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1508.577403] env[62627]: DEBUG oslo_vmware.api [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Task: {'id': task-2195608, 'name': PowerOnVM_Task, 'duration_secs': 0.44111} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1508.577772] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: a66936c9-e164-4431-b5e4-40cfbdb1ca8e] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1508.577813] env[62627]: INFO nova.compute.manager [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: a66936c9-e164-4431-b5e4-40cfbdb1ca8e] Took 5.30 seconds to spawn the instance on the hypervisor. [ 1508.577957] env[62627]: DEBUG nova.compute.manager [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: a66936c9-e164-4431-b5e4-40cfbdb1ca8e] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1508.578793] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b62280f9-2590-4251-9f9f-b9c6724b550a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.093360] env[62627]: INFO nova.compute.manager [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: a66936c9-e164-4431-b5e4-40cfbdb1ca8e] Took 11.95 seconds to build instance. 
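The oslo_concurrency.lockutils DEBUG lines throughout this excerpt ("Acquiring lock ... by ...", "Lock ... acquired ... waited 0.000s", "Lock ... 'released' ... held 13.459s") are emitted by oslo.concurrency's lock()/synchronized() wrapper, which Nova places around per-instance critical sections such as _locked_do_build_and_run_instance and do_reserve. A minimal sketch of that pattern, using oslo.concurrency directly (illustrative only, not the Nova source; it assumes oslo.concurrency is installed, and the lock name below simply reuses the instance UUID seen in the log):

    from oslo_concurrency import lockutils

    # Serialize work on a single instance the way the log shows Nova doing it:
    # the wrapper in lockutils logs "Acquiring lock", "acquired ... waited" and
    # "released ... held" around the decorated function.
    @lockutils.synchronized('a66936c9-e164-4431-b5e4-40cfbdb1ca8e')
    def do_build_and_run_instance():
        # ... spawn the instance, poll vCenter tasks, etc. (placeholder body)
        pass

    do_build_and_run_instance()

The held/waited durations reported in the log (e.g. "held 13.459s" for the build lock) are measured by that same wrapper, so they bound the whole critical section, not any single vCenter task.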
[ 1509.595106] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e68eb96b-5456-446f-a3ac-c87a60844179 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Lock "a66936c9-e164-4431-b5e4-40cfbdb1ca8e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.459s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1509.663337] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-7733d146-bf33-4735-a21a-7e7beaeb4453 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Volume attach. Driver type: vmdk {{(pid=62627) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1509.663584] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-7733d146-bf33-4735-a21a-7e7beaeb4453 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447870', 'volume_id': '34258b00-8c9e-4ec9-8c4d-c99d0551604a', 'name': 'volume-34258b00-8c9e-4ec9-8c4d-c99d0551604a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '54b2cc24-6652-4cc4-93cc-b059f8a9be86', 'attached_at': '', 'detached_at': '', 'volume_id': '34258b00-8c9e-4ec9-8c4d-c99d0551604a', 'serial': '34258b00-8c9e-4ec9-8c4d-c99d0551604a'} {{(pid=62627) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1509.664510] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c875487a-9183-49b7-9a6f-63420ea309b6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.681016] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-649d718c-6302-4b69-a7cb-1b1e7da98194 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.705099] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-7733d146-bf33-4735-a21a-7e7beaeb4453 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Reconfiguring VM instance instance-00000079 to attach disk [datastore2] volume-34258b00-8c9e-4ec9-8c4d-c99d0551604a/volume-34258b00-8c9e-4ec9-8c4d-c99d0551604a.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1509.705343] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ab2518d2-15b2-4c46-a15a-f5b42b78d8c4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.722439] env[62627]: DEBUG oslo_vmware.api [None req-7733d146-bf33-4735-a21a-7e7beaeb4453 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Waiting for the task: (returnval){ [ 1509.722439] env[62627]: value = "task-2195609" [ 1509.722439] env[62627]: _type = "Task" [ 1509.722439] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1509.729731] env[62627]: DEBUG oslo_vmware.api [None req-7733d146-bf33-4735-a21a-7e7beaeb4453 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': task-2195609, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.052928] env[62627]: INFO nova.compute.manager [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: a66936c9-e164-4431-b5e4-40cfbdb1ca8e] Rebuilding instance [ 1510.093359] env[62627]: DEBUG nova.compute.manager [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: a66936c9-e164-4431-b5e4-40cfbdb1ca8e] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1510.094245] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0617e69-50e6-485f-bc85-eefb308de63a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.231678] env[62627]: DEBUG oslo_vmware.api [None req-7733d146-bf33-4735-a21a-7e7beaeb4453 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': task-2195609, 'name': ReconfigVM_Task, 'duration_secs': 0.397905} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1510.231952] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-7733d146-bf33-4735-a21a-7e7beaeb4453 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Reconfigured VM instance instance-00000079 to attach disk [datastore2] volume-34258b00-8c9e-4ec9-8c4d-c99d0551604a/volume-34258b00-8c9e-4ec9-8c4d-c99d0551604a.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1510.236509] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6bf85ed7-ec9e-423c-b761-784d03f12373 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.250227] env[62627]: DEBUG oslo_vmware.api [None req-7733d146-bf33-4735-a21a-7e7beaeb4453 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Waiting for the task: (returnval){ [ 1510.250227] env[62627]: value = "task-2195610" [ 1510.250227] env[62627]: _type = "Task" [ 1510.250227] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1510.257358] env[62627]: DEBUG oslo_vmware.api [None req-7733d146-bf33-4735-a21a-7e7beaeb4453 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': task-2195610, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.759962] env[62627]: DEBUG oslo_vmware.api [None req-7733d146-bf33-4735-a21a-7e7beaeb4453 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': task-2195610, 'name': ReconfigVM_Task, 'duration_secs': 0.126665} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1510.760368] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-7733d146-bf33-4735-a21a-7e7beaeb4453 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447870', 'volume_id': '34258b00-8c9e-4ec9-8c4d-c99d0551604a', 'name': 'volume-34258b00-8c9e-4ec9-8c4d-c99d0551604a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '54b2cc24-6652-4cc4-93cc-b059f8a9be86', 'attached_at': '', 'detached_at': '', 'volume_id': '34258b00-8c9e-4ec9-8c4d-c99d0551604a', 'serial': '34258b00-8c9e-4ec9-8c4d-c99d0551604a'} {{(pid=62627) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1511.106153] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: a66936c9-e164-4431-b5e4-40cfbdb1ca8e] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1511.106452] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1af4afb5-bc65-4b5b-860b-67fc420e2065 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.114091] env[62627]: DEBUG oslo_vmware.api [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Waiting for the task: (returnval){ [ 1511.114091] env[62627]: value = "task-2195611" [ 1511.114091] env[62627]: _type = "Task" [ 1511.114091] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1511.122870] env[62627]: DEBUG oslo_vmware.api [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Task: {'id': task-2195611, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1511.622965] env[62627]: DEBUG oslo_vmware.api [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Task: {'id': task-2195611, 'name': PowerOffVM_Task, 'duration_secs': 0.166863} completed successfully. 
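
Note: the attach-volume entries above show the pattern the rest of this section repeats: every vCenter mutation (ReconfigVM_Task, PowerOffVM_Task, CopyVirtualDisk_Task, ...) returns a task that oslo.vmware's wait_for_task polls until it reports success or an error, which is what the "progress is N%." / "completed successfully." lines are. The two ReconfigVM_Task calls (task-2195609 and task-2195610) belong to a single volume attach: the first adds the vmdk-backed disk, and the short second one appears to record the volume's metadata on the VM before "Attached VMDK" is logged. The sketch below is a simplified, self-contained illustration of that polling loop, not the real oslo_vmware.api code; the TaskInfo class and the poll_task_info accessor are hypothetical stand-ins.

    import time
    from dataclasses import dataclass

    @dataclass
    class TaskInfo:
        """Minimal stand-in for vim.TaskInfo: state is one of
        'queued', 'running', 'success', 'error'."""
        state: str
        progress: int = 0
        error: str | None = None

    def wait_for_task(poll_task_info, task_id, poll_interval=0.5):
        """Poll a vCenter task until it finishes, mirroring the
        'progress is N%.' / 'completed successfully.' lines above.

        poll_task_info: callable returning the current TaskInfo for
        task_id (a hypothetical accessor; the real driver reads task
        state through the PropertyCollector)."""
        while True:
            info = poll_task_info(task_id)
            if info.state in ('queued', 'running'):
                print(f"Task {task_id} progress is {info.progress}%.")
                time.sleep(poll_interval)
                continue
            if info.state == 'success':
                print(f"Task {task_id} completed successfully.")
                return info
            raise RuntimeError(f"Task {task_id} failed: {info.error}")
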
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1511.623277] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: a66936c9-e164-4431-b5e4-40cfbdb1ca8e] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1511.623508] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: a66936c9-e164-4431-b5e4-40cfbdb1ca8e] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1511.624279] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd5dcf29-2669-4cd2-accf-66797cd25aa0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.630462] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: a66936c9-e164-4431-b5e4-40cfbdb1ca8e] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1511.630667] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-75878a58-8399-4d52-b973-d789ee518074 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.653774] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: a66936c9-e164-4431-b5e4-40cfbdb1ca8e] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1511.653970] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: a66936c9-e164-4431-b5e4-40cfbdb1ca8e] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1511.654170] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Deleting the datastore file [datastore2] a66936c9-e164-4431-b5e4-40cfbdb1ca8e {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1511.654399] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-723861a6-1146-469c-9c48-cee67efac18a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.660392] env[62627]: DEBUG oslo_vmware.api [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Waiting for the task: (returnval){ [ 1511.660392] env[62627]: value = "task-2195613" [ 1511.660392] env[62627]: _type = "Task" [ 1511.660392] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1511.667788] env[62627]: DEBUG oslo_vmware.api [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Task: {'id': task-2195613, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1511.797093] env[62627]: DEBUG nova.objects.instance [None req-7733d146-bf33-4735-a21a-7e7beaeb4453 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Lazy-loading 'flavor' on Instance uuid 54b2cc24-6652-4cc4-93cc-b059f8a9be86 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1512.171071] env[62627]: DEBUG oslo_vmware.api [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Task: {'id': task-2195613, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.110205} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1512.171298] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1512.171488] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: a66936c9-e164-4431-b5e4-40cfbdb1ca8e] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1512.171659] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: a66936c9-e164-4431-b5e4-40cfbdb1ca8e] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1512.302025] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7733d146-bf33-4735-a21a-7e7beaeb4453 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Lock "54b2cc24-6652-4cc4-93cc-b059f8a9be86" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.238s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1512.386816] env[62627]: DEBUG oslo_concurrency.lockutils [None req-38075c93-dd12-49c7-afd7-da9e057c2953 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Acquiring lock "54b2cc24-6652-4cc4-93cc-b059f8a9be86" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1512.387073] env[62627]: DEBUG oslo_concurrency.lockutils [None req-38075c93-dd12-49c7-afd7-da9e057c2953 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Lock "54b2cc24-6652-4cc4-93cc-b059f8a9be86" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62627) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1512.387253] env[62627]: DEBUG nova.compute.manager [None req-38075c93-dd12-49c7-afd7-da9e057c2953 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1512.388133] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b9c4b21-5ecb-44d1-b170-dc206b8f4dfe {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.394840] env[62627]: DEBUG nova.compute.manager [None req-38075c93-dd12-49c7-afd7-da9e057c2953 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62627) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1512.395392] env[62627]: DEBUG nova.objects.instance [None req-38075c93-dd12-49c7-afd7-da9e057c2953 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Lazy-loading 'flavor' on Instance uuid 54b2cc24-6652-4cc4-93cc-b059f8a9be86 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1513.044906] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1513.045221] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1513.207488] env[62627]: DEBUG nova.virt.hardware [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1513.207734] env[62627]: DEBUG nova.virt.hardware [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1513.207890] env[62627]: DEBUG nova.virt.hardware [None 
req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1513.208085] env[62627]: DEBUG nova.virt.hardware [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1513.208233] env[62627]: DEBUG nova.virt.hardware [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1513.208378] env[62627]: DEBUG nova.virt.hardware [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1513.208581] env[62627]: DEBUG nova.virt.hardware [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1513.208756] env[62627]: DEBUG nova.virt.hardware [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1513.208975] env[62627]: DEBUG nova.virt.hardware [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1513.209165] env[62627]: DEBUG nova.virt.hardware [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1513.209355] env[62627]: DEBUG nova.virt.hardware [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1513.210221] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e653e64-e4be-472c-a2c4-4920d5be9dbd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.218268] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-008001d2-8c62-4d78-ac77-049555e2ec94 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.231239] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None 
req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: a66936c9-e164-4431-b5e4-40cfbdb1ca8e] Instance VIF info [] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1513.236793] env[62627]: DEBUG oslo.service.loopingcall [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1513.237014] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a66936c9-e164-4431-b5e4-40cfbdb1ca8e] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1513.237216] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-42934cc4-6fe3-4c94-adb8-36d54ebf9ab1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.253672] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1513.253672] env[62627]: value = "task-2195614" [ 1513.253672] env[62627]: _type = "Task" [ 1513.253672] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1513.263240] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195614, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1513.401856] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-38075c93-dd12-49c7-afd7-da9e057c2953 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1513.402208] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1187322a-f452-41d1-baba-e2b10487e91d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.410471] env[62627]: DEBUG oslo_vmware.api [None req-38075c93-dd12-49c7-afd7-da9e057c2953 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Waiting for the task: (returnval){ [ 1513.410471] env[62627]: value = "task-2195615" [ 1513.410471] env[62627]: _type = "Task" [ 1513.410471] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1513.419617] env[62627]: DEBUG oslo_vmware.api [None req-38075c93-dd12-49c7-afd7-da9e057c2953 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': task-2195615, 'name': PowerOffVM_Task} progress is 0%. 
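
Note: the nova.virt.hardware lines a little further up walk through CPU-topology selection for the m1.nano flavor: with no flavor or image constraints the limits default to 65536 sockets/cores/threads, and for 1 vCPU the only topology whose product matches the vCPU count is 1:1:1, hence "Got 1 possible topologies". The snippet below is a simplified enumeration of that selection rule, not nova's actual _get_possible_cpu_topologies code; the small maxima in the example call are only to keep the brute-force search cheap.

    import itertools
    from typing import NamedTuple

    class Topology(NamedTuple):
        sockets: int
        cores: int
        threads: int

    def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
        """Enumerate (sockets, cores, threads) combinations whose product
        equals the vCPU count, bounded by the per-dimension maxima."""
        topos = []
        for s, c, t in itertools.product(range(1, max_sockets + 1),
                                         range(1, max_cores + 1),
                                         range(1, max_threads + 1)):
            if s * c * t == vcpus:
                topos.append(Topology(s, c, t))
        return topos

    # For 1 vCPU this yields exactly one topology, (1, 1, 1), as in the
    # log; brute force is impractical at the real 65536 limits, so this
    # only illustrates the selection rule.
    print(possible_topologies(1, 4, 4, 2))  # [Topology(sockets=1, cores=1, threads=1)]
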
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1513.551610] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1513.551827] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Starting heal instance info cache {{(pid=62627) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 1513.551969] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Rebuilding the list of instances to heal {{(pid=62627) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10449}} [ 1513.763362] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195614, 'name': CreateVM_Task, 'duration_secs': 0.273346} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1513.763546] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a66936c9-e164-4431-b5e4-40cfbdb1ca8e] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1513.763910] env[62627]: DEBUG oslo_concurrency.lockutils [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1513.764084] env[62627]: DEBUG oslo_concurrency.lockutils [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1513.764416] env[62627]: DEBUG oslo_concurrency.lockutils [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1513.764663] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-845ecba1-6f3a-4602-82b8-37409102620a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.768717] env[62627]: DEBUG oslo_vmware.api [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Waiting for the task: (returnval){ [ 1513.768717] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]520da009-2615-8654-bdf3-fa6fdd6f3cd1" [ 1513.768717] env[62627]: _type = "Task" [ 1513.768717] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1513.776140] env[62627]: DEBUG oslo_vmware.api [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]520da009-2615-8654-bdf3-fa6fdd6f3cd1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1513.919561] env[62627]: DEBUG oslo_vmware.api [None req-38075c93-dd12-49c7-afd7-da9e057c2953 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': task-2195615, 'name': PowerOffVM_Task, 'duration_secs': 0.196093} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1513.919817] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-38075c93-dd12-49c7-afd7-da9e057c2953 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1513.920015] env[62627]: DEBUG nova.compute.manager [None req-38075c93-dd12-49c7-afd7-da9e057c2953 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1513.920743] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb089c36-fdff-4ba1-a9e8-9e7ba66fecd6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.082241] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Acquiring lock "refresh_cache-77b68106-1abe-426b-89ce-3495776b9233" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1514.082533] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Acquired lock "refresh_cache-77b68106-1abe-426b-89ce-3495776b9233" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1514.082533] env[62627]: DEBUG nova.network.neutron [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Forcefully refreshing network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1514.082675] env[62627]: DEBUG nova.objects.instance [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lazy-loading 'info_cache' on Instance uuid 77b68106-1abe-426b-89ce-3495776b9233 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1514.278699] env[62627]: DEBUG oslo_vmware.api [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]520da009-2615-8654-bdf3-fa6fdd6f3cd1, 'name': SearchDatastore_Task, 'duration_secs': 0.009013} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1514.279059] env[62627]: DEBUG oslo_concurrency.lockutils [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1514.279306] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: a66936c9-e164-4431-b5e4-40cfbdb1ca8e] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1514.279534] env[62627]: DEBUG oslo_concurrency.lockutils [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1514.279677] env[62627]: DEBUG oslo_concurrency.lockutils [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1514.279852] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1514.280126] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fb25df04-3d20-4515-b395-d3ea82f74d20 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.288083] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1514.288264] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1514.288958] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1c47b520-ae59-4975-b9ed-9ea88277db7f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.295223] env[62627]: DEBUG oslo_vmware.api [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Waiting for the task: (returnval){ [ 1514.295223] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52c2d299-2c75-311f-c16b-920f70df0b31" [ 1514.295223] env[62627]: _type = "Task" [ 1514.295223] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1514.302325] env[62627]: DEBUG oslo_vmware.api [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52c2d299-2c75-311f-c16b-920f70df0b31, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.432146] env[62627]: DEBUG oslo_concurrency.lockutils [None req-38075c93-dd12-49c7-afd7-da9e057c2953 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Lock "54b2cc24-6652-4cc4-93cc-b059f8a9be86" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.045s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1514.750956] env[62627]: DEBUG nova.objects.instance [None req-d06c3f29-3c6b-4347-a9d8-fc4ba270b0bd tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Lazy-loading 'flavor' on Instance uuid 54b2cc24-6652-4cc4-93cc-b059f8a9be86 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1514.805902] env[62627]: DEBUG oslo_vmware.api [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52c2d299-2c75-311f-c16b-920f70df0b31, 'name': SearchDatastore_Task, 'duration_secs': 0.007827} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1514.806749] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e5e6f868-e99d-40c5-9c00-7c4fb41806c8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.811409] env[62627]: DEBUG oslo_vmware.api [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Waiting for the task: (returnval){ [ 1514.811409] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]529ce7a2-c3e3-1a4c-ad68-8da60f2c68bf" [ 1514.811409] env[62627]: _type = "Task" [ 1514.811409] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1514.818526] env[62627]: DEBUG oslo_vmware.api [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]529ce7a2-c3e3-1a4c-ad68-8da60f2c68bf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.255242] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d06c3f29-3c6b-4347-a9d8-fc4ba270b0bd tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Acquiring lock "refresh_cache-54b2cc24-6652-4cc4-93cc-b059f8a9be86" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1515.255691] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d06c3f29-3c6b-4347-a9d8-fc4ba270b0bd tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Acquired lock "refresh_cache-54b2cc24-6652-4cc4-93cc-b059f8a9be86" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1515.255691] env[62627]: DEBUG nova.network.neutron [None req-d06c3f29-3c6b-4347-a9d8-fc4ba270b0bd tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1515.255764] env[62627]: DEBUG nova.objects.instance [None req-d06c3f29-3c6b-4347-a9d8-fc4ba270b0bd tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Lazy-loading 'info_cache' on Instance uuid 54b2cc24-6652-4cc4-93cc-b059f8a9be86 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1515.321454] env[62627]: DEBUG oslo_vmware.api [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]529ce7a2-c3e3-1a4c-ad68-8da60f2c68bf, 'name': SearchDatastore_Task, 'duration_secs': 0.01044} completed successfully. 
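
Note: the entries above are the image-cache lookup that precedes the rebuild's disk copy: the driver locks the cached base-image path "[datastore2] devstack-image-cache_base/<image-id>/<image-id>.vmdk", ensures the cache folder exists, and runs SearchDatastore_Task to confirm the cached vmdk is present before copying it to the instance's own folder. The sketch below shows that fetch-if-missing flow in outline; the three callables are hypothetical stand-ins (the real logic, including the locking against concurrent downloads, lives in nova.virt.vmwareapi.vmops), and in this run the cirros image was already cached, so the flow went straight to the copy.

    def cached_image_vmdk(datastore, image_id, cache_dir='devstack-image-cache_base'):
        """Datastore path of the cached base image, as seen in the log."""
        return f'[{datastore}] {cache_dir}/{image_id}/{image_id}.vmdk'

    def instance_vmdk(datastore, instance_uuid):
        """Datastore path of the instance's per-VM root disk copy."""
        return f'[{datastore}] {instance_uuid}/{instance_uuid}.vmdk'

    def prepare_root_disk(search_datastore, fetch_from_glance, copy_virtual_disk,
                          datastore, image_id, instance_uuid):
        """Simplified fetch-if-missing flow: search the cache, populate it
        on a miss, then copy the cached vmdk to the instance folder."""
        src = cached_image_vmdk(datastore, image_id)
        dst = instance_vmdk(datastore, instance_uuid)
        if not search_datastore(src):          # SearchDatastore_Task miss
            fetch_from_glance(image_id, src)   # download into the cache first
        copy_virtual_disk(src, dst)            # CopyVirtualDisk_Task
        return dst
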
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1515.321686] env[62627]: DEBUG oslo_concurrency.lockutils [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1515.321962] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] a66936c9-e164-4431-b5e4-40cfbdb1ca8e/a66936c9-e164-4431-b5e4-40cfbdb1ca8e.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1515.322233] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5f0dccde-cfc5-426d-84d7-7dbcaa9f353c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.328091] env[62627]: DEBUG oslo_vmware.api [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Waiting for the task: (returnval){ [ 1515.328091] env[62627]: value = "task-2195616" [ 1515.328091] env[62627]: _type = "Task" [ 1515.328091] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1515.335448] env[62627]: DEBUG oslo_vmware.api [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Task: {'id': task-2195616, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.759007] env[62627]: DEBUG nova.objects.base [None req-d06c3f29-3c6b-4347-a9d8-fc4ba270b0bd tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Object Instance<54b2cc24-6652-4cc4-93cc-b059f8a9be86> lazy-loaded attributes: flavor,info_cache {{(pid=62627) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1515.817170] env[62627]: DEBUG nova.network.neutron [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Updating instance_info_cache with network_info: [{"id": "54f0bc09-a4b0-47de-a805-27b17f146e00", "address": "fa:16:3e:50:f4:19", "network": {"id": "eb8841de-7dc6-4afa-ad35-670f3baa1354", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-428698998-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.161", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6095198e22f645698a7c566414eba47c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cd38fdec-d092-4a84-ab41-685f6dbb4f29", "external-id": "nsx-vlan-transportzone-622", "segmentation_id": 622, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54f0bc09-a4", "ovs_interfaceid": "54f0bc09-a4b0-47de-a805-27b17f146e00", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1515.837769] env[62627]: DEBUG oslo_vmware.api [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Task: {'id': task-2195616, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.423077} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1515.837992] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] a66936c9-e164-4431-b5e4-40cfbdb1ca8e/a66936c9-e164-4431-b5e4-40cfbdb1ca8e.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1515.838212] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: a66936c9-e164-4431-b5e4-40cfbdb1ca8e] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1515.838435] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3f04a58b-ace9-4e6b-96b4-aef01445f2e2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.844795] env[62627]: DEBUG oslo_vmware.api [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Waiting for the task: (returnval){ [ 1515.844795] env[62627]: value = "task-2195617" [ 1515.844795] env[62627]: _type = "Task" [ 1515.844795] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1515.851694] env[62627]: DEBUG oslo_vmware.api [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Task: {'id': task-2195617, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1516.319811] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Releasing lock "refresh_cache-77b68106-1abe-426b-89ce-3495776b9233" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1516.320105] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Updated the network info_cache for instance {{(pid=62627) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10516}} [ 1516.322304] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1516.322482] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1516.322921] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1516.323100] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1516.323265] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1516.323394] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1516.323548] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62627) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 1516.323655] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1516.357331] env[62627]: DEBUG oslo_vmware.api [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Task: {'id': task-2195617, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.060595} completed successfully. 
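
Note: "Extending root virtual disk to 1048576" is the m1.nano flavor's root_gb=1 expressed in the KB units that ExtendVirtualDisk_Task expects for its new capacity: 1 GiB = 1024 * 1024 KiB = 1048576. A one-line check:

    def root_gb_to_kb(root_gb: int) -> int:
        """Convert a flavor's root_gb to the KB value passed to
        ExtendVirtualDisk_Task, matching the figure in the log."""
        return root_gb * 1024 * 1024

    assert root_gb_to_kb(1) == 1048576
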
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1516.357578] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: a66936c9-e164-4431-b5e4-40cfbdb1ca8e] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1516.358354] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c406a230-d1f5-492a-80f3-f610cb32aa1f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.377345] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: a66936c9-e164-4431-b5e4-40cfbdb1ca8e] Reconfiguring VM instance instance-0000007b to attach disk [datastore2] a66936c9-e164-4431-b5e4-40cfbdb1ca8e/a66936c9-e164-4431-b5e4-40cfbdb1ca8e.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1516.379559] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c70e6ef8-e69f-4d6b-92ed-bd7679d9e820 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.397476] env[62627]: DEBUG oslo_vmware.api [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Waiting for the task: (returnval){ [ 1516.397476] env[62627]: value = "task-2195618" [ 1516.397476] env[62627]: _type = "Task" [ 1516.397476] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1516.404944] env[62627]: DEBUG oslo_vmware.api [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Task: {'id': task-2195618, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1516.484508] env[62627]: DEBUG nova.network.neutron [None req-d06c3f29-3c6b-4347-a9d8-fc4ba270b0bd tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Updating instance_info_cache with network_info: [{"id": "b7ce94b7-c449-47bc-b068-3f8eca9f22f0", "address": "fa:16:3e:80:8b:50", "network": {"id": "da92dc57-06a7-46e5-a51b-1f0a8d7373a7", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-952102388-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.220", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4b88e3a0d8ee4e3682bd312872dcbc0f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4a8f5363-be3a-4f92-9ccf-33bb0c8113b3", "external-id": "nsx-vlan-transportzone-944", "segmentation_id": 944, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb7ce94b7-c4", "ovs_interfaceid": "b7ce94b7-c449-47bc-b068-3f8eca9f22f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1516.827115] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1516.827352] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1516.827524] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1516.827681] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62627) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1516.829147] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c974828-2c03-427f-9dfa-6f656969eeda {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.837065] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68221242-1705-440a-ac31-5ff295b11bb6 
{{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.850128] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab6086a5-2e6d-4c01-b667-54a0130b09bb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.856035] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ceaa45f-1d6d-406d-8bd8-d860225a9342 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.883220] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181066MB free_disk=177GB free_vcpus=48 pci_devices=None {{(pid=62627) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1516.883368] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1516.883553] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1516.905504] env[62627]: DEBUG oslo_vmware.api [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Task: {'id': task-2195618, 'name': ReconfigVM_Task, 'duration_secs': 0.249845} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1516.906471] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: a66936c9-e164-4431-b5e4-40cfbdb1ca8e] Reconfigured VM instance instance-0000007b to attach disk [datastore2] a66936c9-e164-4431-b5e4-40cfbdb1ca8e/a66936c9-e164-4431-b5e4-40cfbdb1ca8e.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1516.907048] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2604c585-fbed-481d-a302-003fbc1a4179 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.913407] env[62627]: DEBUG oslo_vmware.api [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Waiting for the task: (returnval){ [ 1516.913407] env[62627]: value = "task-2195619" [ 1516.913407] env[62627]: _type = "Task" [ 1516.913407] env[62627]: } to complete. 
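
Note: the two large "Updating instance_info_cache with network_info" entries above (for instances 77b68106 and 54b2cc24) show the structure Nova caches per VIF: an OVS port id and MAC, the Neutron network with its subnets, and each fixed IP with any floating IPs mapped onto it. The helper below is not part of nova; it is a small illustration of how to read fixed/floating address pairs out of a network_info list shaped like the logged JSON.

    def addresses(network_info):
        """Return (fixed_ip, floating_ip_or_None) pairs from a Nova
        network_info cache entry like the ones logged above."""
        pairs = []
        for vif in network_info:
            for subnet in vif['network']['subnets']:
                for ip in subnet['ips']:
                    floats = ip.get('floating_ips', [])
                    if not floats:
                        pairs.append((ip['address'], None))
                    for fip in floats:
                        pairs.append((ip['address'], fip['address']))
        return pairs

    # For the 54b2cc24 entry above this returns
    # [('192.168.128.3', '10.180.180.220')].
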
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1516.922351] env[62627]: DEBUG oslo_vmware.api [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Task: {'id': task-2195619, 'name': Rename_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1516.987204] env[62627]: DEBUG oslo_concurrency.lockutils [None req-d06c3f29-3c6b-4347-a9d8-fc4ba270b0bd tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Releasing lock "refresh_cache-54b2cc24-6652-4cc4-93cc-b059f8a9be86" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1517.422785] env[62627]: DEBUG oslo_vmware.api [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Task: {'id': task-2195619, 'name': Rename_Task, 'duration_secs': 0.130886} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1517.423147] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: a66936c9-e164-4431-b5e4-40cfbdb1ca8e] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1517.423205] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d4d2219d-e246-43f4-89f4-e657d4ca2e2c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.429463] env[62627]: DEBUG oslo_vmware.api [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Waiting for the task: (returnval){ [ 1517.429463] env[62627]: value = "task-2195620" [ 1517.429463] env[62627]: _type = "Task" [ 1517.429463] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1517.436597] env[62627]: DEBUG oslo_vmware.api [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Task: {'id': task-2195620, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.911309] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 54b2cc24-6652-4cc4-93cc-b059f8a9be86 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1517.911464] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 77b68106-1abe-426b-89ce-3495776b9233 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1517.911587] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance de1345ab-57f6-4d2c-918e-e9cdcdc16b50 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1517.911703] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance a66936c9-e164-4431-b5e4-40cfbdb1ca8e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1517.911891] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=62627) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1517.912075] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=200GB used_disk=4GB total_vcpus=48 used_vcpus=4 pci_stats=[] {{(pid=62627) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1517.939444] env[62627]: DEBUG oslo_vmware.api [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Task: {'id': task-2195620, 'name': PowerOnVM_Task, 'duration_secs': 0.389997} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1517.941899] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: a66936c9-e164-4431-b5e4-40cfbdb1ca8e] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1517.942153] env[62627]: DEBUG nova.compute.manager [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: a66936c9-e164-4431-b5e4-40cfbdb1ca8e] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1517.943059] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8af536f9-590f-405d-ae04-24387a94c3b9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.977858] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14e51ec0-8b96-4d58-ba3f-d51b981ba92d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.985667] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd831954-611b-4cd7-9ac2-066c55679432 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.015024] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-d06c3f29-3c6b-4347-a9d8-fc4ba270b0bd tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1518.015584] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5ef6240d-330a-41e1-8224-91dc839b6802 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.017480] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6e298cc-31eb-4b40-a532-39f54b618b40 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.026830] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b3ff447-f41f-4116-801a-6b685e4d437e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.030377] env[62627]: DEBUG oslo_vmware.api [None req-d06c3f29-3c6b-4347-a9d8-fc4ba270b0bd tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Waiting for the task: (returnval){ [ 1518.030377] env[62627]: value = "task-2195621" [ 1518.030377] env[62627]: _type = "Task" [ 1518.030377] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1518.041103] env[62627]: DEBUG nova.compute.provider_tree [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1518.046973] env[62627]: DEBUG oslo_vmware.api [None req-d06c3f29-3c6b-4347-a9d8-fc4ba270b0bd tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': task-2195621, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.460717] env[62627]: DEBUG oslo_concurrency.lockutils [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1518.539935] env[62627]: DEBUG oslo_vmware.api [None req-d06c3f29-3c6b-4347-a9d8-fc4ba270b0bd tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': task-2195621, 'name': PowerOnVM_Task, 'duration_secs': 0.442098} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1518.540158] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-d06c3f29-3c6b-4347-a9d8-fc4ba270b0bd tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1518.540348] env[62627]: DEBUG nova.compute.manager [None req-d06c3f29-3c6b-4347-a9d8-fc4ba270b0bd tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1518.541096] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c26dbbb8-0784-44eb-8cb7-a83e851bcad9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.543978] env[62627]: DEBUG nova.scheduler.client.report [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1518.814464] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7a51d857-c162-4f7b-afc1-d8df820eb546 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Acquiring lock "a66936c9-e164-4431-b5e4-40cfbdb1ca8e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1518.814721] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7a51d857-c162-4f7b-afc1-d8df820eb546 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Lock "a66936c9-e164-4431-b5e4-40cfbdb1ca8e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1518.814928] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7a51d857-c162-4f7b-afc1-d8df820eb546 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Acquiring lock "a66936c9-e164-4431-b5e4-40cfbdb1ca8e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1518.815126] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7a51d857-c162-4f7b-afc1-d8df820eb546 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Lock "a66936c9-e164-4431-b5e4-40cfbdb1ca8e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1518.815297] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7a51d857-c162-4f7b-afc1-d8df820eb546 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Lock "a66936c9-e164-4431-b5e4-40cfbdb1ca8e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1518.817306] env[62627]: INFO nova.compute.manager [None req-7a51d857-c162-4f7b-afc1-d8df820eb546 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: a66936c9-e164-4431-b5e4-40cfbdb1ca8e] Terminating instance [ 1519.048820] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62627) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1519.049074] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.165s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1519.049365] env[62627]: DEBUG oslo_concurrency.lockutils [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.589s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1519.049549] env[62627]: DEBUG nova.objects.instance [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: a66936c9-e164-4431-b5e4-40cfbdb1ca8e] Trying to apply a migration context that does not seem to 
be set for this instance {{(pid=62627) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1519.320651] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7a51d857-c162-4f7b-afc1-d8df820eb546 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Acquiring lock "refresh_cache-a66936c9-e164-4431-b5e4-40cfbdb1ca8e" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1519.320825] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7a51d857-c162-4f7b-afc1-d8df820eb546 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Acquired lock "refresh_cache-a66936c9-e164-4431-b5e4-40cfbdb1ca8e" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1519.321063] env[62627]: DEBUG nova.network.neutron [None req-7a51d857-c162-4f7b-afc1-d8df820eb546 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: a66936c9-e164-4431-b5e4-40cfbdb1ca8e] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1519.839829] env[62627]: DEBUG nova.network.neutron [None req-7a51d857-c162-4f7b-afc1-d8df820eb546 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: a66936c9-e164-4431-b5e4-40cfbdb1ca8e] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1519.893598] env[62627]: DEBUG nova.network.neutron [None req-7a51d857-c162-4f7b-afc1-d8df820eb546 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: a66936c9-e164-4431-b5e4-40cfbdb1ca8e] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1520.058090] env[62627]: DEBUG oslo_concurrency.lockutils [None req-23b01030-25a1-426b-be88-9503fc802ccf tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.009s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1520.395878] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7a51d857-c162-4f7b-afc1-d8df820eb546 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Releasing lock "refresh_cache-a66936c9-e164-4431-b5e4-40cfbdb1ca8e" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1520.396379] env[62627]: DEBUG nova.compute.manager [None req-7a51d857-c162-4f7b-afc1-d8df820eb546 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: a66936c9-e164-4431-b5e4-40cfbdb1ca8e] Start destroying the instance on the hypervisor. 
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1520.396602] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7a51d857-c162-4f7b-afc1-d8df820eb546 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: a66936c9-e164-4431-b5e4-40cfbdb1ca8e] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1520.399175] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca11d0de-a6e2-4c40-beed-107282842c99 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.407324] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a51d857-c162-4f7b-afc1-d8df820eb546 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: a66936c9-e164-4431-b5e4-40cfbdb1ca8e] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1520.407574] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-632a1fed-8a70-471f-b707-fd2b042cd1f4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.413577] env[62627]: DEBUG oslo_vmware.api [None req-7a51d857-c162-4f7b-afc1-d8df820eb546 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Waiting for the task: (returnval){ [ 1520.413577] env[62627]: value = "task-2195622" [ 1520.413577] env[62627]: _type = "Task" [ 1520.413577] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1520.421656] env[62627]: DEBUG oslo_vmware.api [None req-7a51d857-c162-4f7b-afc1-d8df820eb546 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Task: {'id': task-2195622, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.923678] env[62627]: DEBUG oslo_vmware.api [None req-7a51d857-c162-4f7b-afc1-d8df820eb546 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Task: {'id': task-2195622, 'name': PowerOffVM_Task, 'duration_secs': 0.126873} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1520.924113] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a51d857-c162-4f7b-afc1-d8df820eb546 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: a66936c9-e164-4431-b5e4-40cfbdb1ca8e] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1520.924113] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7a51d857-c162-4f7b-afc1-d8df820eb546 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: a66936c9-e164-4431-b5e4-40cfbdb1ca8e] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1520.928114] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-38d53a56-97fb-4d67-b1ca-44e692663fcd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.955478] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7a51d857-c162-4f7b-afc1-d8df820eb546 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: a66936c9-e164-4431-b5e4-40cfbdb1ca8e] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1520.955704] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7a51d857-c162-4f7b-afc1-d8df820eb546 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: a66936c9-e164-4431-b5e4-40cfbdb1ca8e] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1520.955881] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-7a51d857-c162-4f7b-afc1-d8df820eb546 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Deleting the datastore file [datastore2] a66936c9-e164-4431-b5e4-40cfbdb1ca8e {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1520.956157] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d349bb2a-cb31-4aab-af63-73cbd06794f8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.962501] env[62627]: DEBUG oslo_vmware.api [None req-7a51d857-c162-4f7b-afc1-d8df820eb546 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Waiting for the task: (returnval){ [ 1520.962501] env[62627]: value = "task-2195624" [ 1520.962501] env[62627]: _type = "Task" [ 1520.962501] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1520.969890] env[62627]: DEBUG oslo_vmware.api [None req-7a51d857-c162-4f7b-afc1-d8df820eb546 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Task: {'id': task-2195624, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.472816] env[62627]: DEBUG oslo_vmware.api [None req-7a51d857-c162-4f7b-afc1-d8df820eb546 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Task: {'id': task-2195624, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.087989} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1521.473077] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-7a51d857-c162-4f7b-afc1-d8df820eb546 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1521.473285] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7a51d857-c162-4f7b-afc1-d8df820eb546 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: a66936c9-e164-4431-b5e4-40cfbdb1ca8e] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1521.473466] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-7a51d857-c162-4f7b-afc1-d8df820eb546 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: a66936c9-e164-4431-b5e4-40cfbdb1ca8e] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1521.473636] env[62627]: INFO nova.compute.manager [None req-7a51d857-c162-4f7b-afc1-d8df820eb546 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: a66936c9-e164-4431-b5e4-40cfbdb1ca8e] Took 1.08 seconds to destroy the instance on the hypervisor. [ 1521.473878] env[62627]: DEBUG oslo.service.loopingcall [None req-7a51d857-c162-4f7b-afc1-d8df820eb546 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1521.474095] env[62627]: DEBUG nova.compute.manager [-] [instance: a66936c9-e164-4431-b5e4-40cfbdb1ca8e] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1521.474193] env[62627]: DEBUG nova.network.neutron [-] [instance: a66936c9-e164-4431-b5e4-40cfbdb1ca8e] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1521.488771] env[62627]: DEBUG nova.network.neutron [-] [instance: a66936c9-e164-4431-b5e4-40cfbdb1ca8e] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1521.991779] env[62627]: DEBUG nova.network.neutron [-] [instance: a66936c9-e164-4431-b5e4-40cfbdb1ca8e] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1522.494434] env[62627]: INFO nova.compute.manager [-] [instance: a66936c9-e164-4431-b5e4-40cfbdb1ca8e] Took 1.02 seconds to deallocate network for instance. 
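The records above cover a complete teardown of instance a66936c9-e164-4431-b5e4-40cfbdb1ca8e: PowerOffVM_Task (task-2195622), UnregisterVM, deletion of the instance directory on datastore2 (DeleteDatastoreFile_Task, task-2195624), and finally network deallocation. The snippet below is only a minimal standalone sketch of that ordering and of the "Took N seconds" timing messages; every function is an illustrative placeholder, not the actual nova.virt.vmwareapi or neutron code path.

```python
import logging
import time

logging.basicConfig(level=logging.INFO, format="%(levelname)s %(message)s")
LOG = logging.getLogger("teardown-sketch")

# Illustrative stand-ins for the vSphere calls seen in the log
# (PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task); the real
# driver issues them through oslo.vmware and polls each returned task.
def power_off_vm(instance):
    LOG.info("Powered off the VM %s", instance)

def unregister_vm(instance):
    LOG.info("Unregistered the VM %s", instance)

def delete_datastore_dir(datastore, instance):
    LOG.info("Deleted contents of [%s] %s", datastore, instance)

def deallocate_network(instance):
    LOG.info("deallocate_for_instance() for %s", instance)

def destroy_instance(instance, datastore="datastore2"):
    """Mirror the ordering visible above: power off, unregister,
    wipe the datastore directory, then release network resources."""
    start = time.monotonic()
    power_off_vm(instance)
    unregister_vm(instance)
    delete_datastore_dir(datastore, instance)
    LOG.info("Took %.2f seconds to destroy the instance on the hypervisor.",
             time.monotonic() - start)
    start = time.monotonic()
    deallocate_network(instance)
    LOG.info("Took %.2f seconds to deallocate network for instance.",
             time.monotonic() - start)

if __name__ == "__main__":
    destroy_instance("a66936c9-e164-4431-b5e4-40cfbdb1ca8e")
```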
[ 1523.001320] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7a51d857-c162-4f7b-afc1-d8df820eb546 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1523.001694] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7a51d857-c162-4f7b-afc1-d8df820eb546 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1523.001822] env[62627]: DEBUG nova.objects.instance [None req-7a51d857-c162-4f7b-afc1-d8df820eb546 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Lazy-loading 'resources' on Instance uuid a66936c9-e164-4431-b5e4-40cfbdb1ca8e {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1523.562958] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c692d2d8-f56c-4e94-9078-97f62d87b8c3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.570425] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22629c5e-33e6-4ec0-8c43-f093281c01e2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.600174] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6307370d-b178-49bd-8cc1-0405d56ad2d2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.606801] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11c2c828-2af6-4d18-abde-43406c97ce5b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.619413] env[62627]: DEBUG nova.compute.provider_tree [None req-7a51d857-c162-4f7b-afc1-d8df820eb546 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1524.122653] env[62627]: DEBUG nova.scheduler.client.report [None req-7a51d857-c162-4f7b-afc1-d8df820eb546 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1524.627631] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7a51d857-c162-4f7b-afc1-d8df820eb546 tempest-ServerShowV247Test-250272930 
tempest-ServerShowV247Test-250272930-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.626s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1524.649660] env[62627]: INFO nova.scheduler.client.report [None req-7a51d857-c162-4f7b-afc1-d8df820eb546 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Deleted allocations for instance a66936c9-e164-4431-b5e4-40cfbdb1ca8e [ 1525.157260] env[62627]: DEBUG oslo_concurrency.lockutils [None req-7a51d857-c162-4f7b-afc1-d8df820eb546 tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Lock "a66936c9-e164-4431-b5e4-40cfbdb1ca8e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.342s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1525.270857] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c8553dcb-e43c-4974-b9ea-6760cfa2823f tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Acquiring lock "de1345ab-57f6-4d2c-918e-e9cdcdc16b50" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1525.271207] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c8553dcb-e43c-4974-b9ea-6760cfa2823f tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Lock "de1345ab-57f6-4d2c-918e-e9cdcdc16b50" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1525.271448] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c8553dcb-e43c-4974-b9ea-6760cfa2823f tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Acquiring lock "de1345ab-57f6-4d2c-918e-e9cdcdc16b50-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1525.271640] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c8553dcb-e43c-4974-b9ea-6760cfa2823f tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Lock "de1345ab-57f6-4d2c-918e-e9cdcdc16b50-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1525.271809] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c8553dcb-e43c-4974-b9ea-6760cfa2823f tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Lock "de1345ab-57f6-4d2c-918e-e9cdcdc16b50-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1525.273865] env[62627]: INFO nova.compute.manager [None req-c8553dcb-e43c-4974-b9ea-6760cfa2823f tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: de1345ab-57f6-4d2c-918e-e9cdcdc16b50] Terminating instance [ 1525.777829] env[62627]: 
DEBUG oslo_concurrency.lockutils [None req-c8553dcb-e43c-4974-b9ea-6760cfa2823f tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Acquiring lock "refresh_cache-de1345ab-57f6-4d2c-918e-e9cdcdc16b50" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1525.778111] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c8553dcb-e43c-4974-b9ea-6760cfa2823f tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Acquired lock "refresh_cache-de1345ab-57f6-4d2c-918e-e9cdcdc16b50" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1525.778342] env[62627]: DEBUG nova.network.neutron [None req-c8553dcb-e43c-4974-b9ea-6760cfa2823f tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: de1345ab-57f6-4d2c-918e-e9cdcdc16b50] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1526.297179] env[62627]: DEBUG nova.network.neutron [None req-c8553dcb-e43c-4974-b9ea-6760cfa2823f tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: de1345ab-57f6-4d2c-918e-e9cdcdc16b50] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1526.351853] env[62627]: DEBUG nova.network.neutron [None req-c8553dcb-e43c-4974-b9ea-6760cfa2823f tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: de1345ab-57f6-4d2c-918e-e9cdcdc16b50] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1526.854650] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c8553dcb-e43c-4974-b9ea-6760cfa2823f tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Releasing lock "refresh_cache-de1345ab-57f6-4d2c-918e-e9cdcdc16b50" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1526.855076] env[62627]: DEBUG nova.compute.manager [None req-c8553dcb-e43c-4974-b9ea-6760cfa2823f tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: de1345ab-57f6-4d2c-918e-e9cdcdc16b50] Start destroying the instance on the hypervisor. 
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1526.855274] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c8553dcb-e43c-4974-b9ea-6760cfa2823f tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: de1345ab-57f6-4d2c-918e-e9cdcdc16b50] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1526.856194] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-467cc936-0e20-46f4-98f8-5ac011a28fb6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.863895] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8553dcb-e43c-4974-b9ea-6760cfa2823f tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: de1345ab-57f6-4d2c-918e-e9cdcdc16b50] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1526.864137] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-69cd2d85-3891-4120-8769-1ef5ca4ce991 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.870098] env[62627]: DEBUG oslo_vmware.api [None req-c8553dcb-e43c-4974-b9ea-6760cfa2823f tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Waiting for the task: (returnval){ [ 1526.870098] env[62627]: value = "task-2195625" [ 1526.870098] env[62627]: _type = "Task" [ 1526.870098] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1526.877376] env[62627]: DEBUG oslo_vmware.api [None req-c8553dcb-e43c-4974-b9ea-6760cfa2823f tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Task: {'id': task-2195625, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1527.381683] env[62627]: DEBUG oslo_vmware.api [None req-c8553dcb-e43c-4974-b9ea-6760cfa2823f tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Task: {'id': task-2195625, 'name': PowerOffVM_Task, 'duration_secs': 0.218529} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1527.382136] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8553dcb-e43c-4974-b9ea-6760cfa2823f tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: de1345ab-57f6-4d2c-918e-e9cdcdc16b50] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1527.382136] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c8553dcb-e43c-4974-b9ea-6760cfa2823f tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: de1345ab-57f6-4d2c-918e-e9cdcdc16b50] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1527.382337] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a4007e6c-2c24-4e64-a945-592bf9be9ed1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.407138] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c8553dcb-e43c-4974-b9ea-6760cfa2823f tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: de1345ab-57f6-4d2c-918e-e9cdcdc16b50] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1527.407340] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c8553dcb-e43c-4974-b9ea-6760cfa2823f tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: de1345ab-57f6-4d2c-918e-e9cdcdc16b50] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1527.407519] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8553dcb-e43c-4974-b9ea-6760cfa2823f tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Deleting the datastore file [datastore2] de1345ab-57f6-4d2c-918e-e9cdcdc16b50 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1527.407756] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1d856b4b-faf2-4af1-b899-9ec7c286524d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.413385] env[62627]: DEBUG oslo_vmware.api [None req-c8553dcb-e43c-4974-b9ea-6760cfa2823f tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Waiting for the task: (returnval){ [ 1527.413385] env[62627]: value = "task-2195627" [ 1527.413385] env[62627]: _type = "Task" [ 1527.413385] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1527.420721] env[62627]: DEBUG oslo_vmware.api [None req-c8553dcb-e43c-4974-b9ea-6760cfa2823f tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Task: {'id': task-2195627, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1527.923801] env[62627]: DEBUG oslo_vmware.api [None req-c8553dcb-e43c-4974-b9ea-6760cfa2823f tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Task: {'id': task-2195627, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.085487} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1527.924051] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8553dcb-e43c-4974-b9ea-6760cfa2823f tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1527.924276] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c8553dcb-e43c-4974-b9ea-6760cfa2823f tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: de1345ab-57f6-4d2c-918e-e9cdcdc16b50] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1527.924463] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c8553dcb-e43c-4974-b9ea-6760cfa2823f tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: de1345ab-57f6-4d2c-918e-e9cdcdc16b50] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1527.924637] env[62627]: INFO nova.compute.manager [None req-c8553dcb-e43c-4974-b9ea-6760cfa2823f tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] [instance: de1345ab-57f6-4d2c-918e-e9cdcdc16b50] Took 1.07 seconds to destroy the instance on the hypervisor. [ 1527.924878] env[62627]: DEBUG oslo.service.loopingcall [None req-c8553dcb-e43c-4974-b9ea-6760cfa2823f tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1527.925076] env[62627]: DEBUG nova.compute.manager [-] [instance: de1345ab-57f6-4d2c-918e-e9cdcdc16b50] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1527.925177] env[62627]: DEBUG nova.network.neutron [-] [instance: de1345ab-57f6-4d2c-918e-e9cdcdc16b50] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1527.941154] env[62627]: DEBUG nova.network.neutron [-] [instance: de1345ab-57f6-4d2c-918e-e9cdcdc16b50] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1528.443193] env[62627]: DEBUG nova.network.neutron [-] [instance: de1345ab-57f6-4d2c-918e-e9cdcdc16b50] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1528.945609] env[62627]: INFO nova.compute.manager [-] [instance: de1345ab-57f6-4d2c-918e-e9cdcdc16b50] Took 1.02 seconds to deallocate network for instance. 
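After each teardown the resource tracker re-reads the placement inventory for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 and reports it unchanged. Those inventory dicts can be read with the usual placement capacity formula, (total - reserved) * allocation_ratio. The helper below is purely illustrative (it is not the scheduler report client); the dict literal is copied from the log, and the closing comment checks it against the "used_ram=1280MB" figure reported earlier.

```python
# Inventory as logged for provider c17e7f32-68d8-4e2f-8979-15d701de02f7.
INVENTORY = {
    'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
             'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                  'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177,
                'step_size': 1, 'allocation_ratio': 1.0},
}

def schedulable_capacity(inv):
    """Effective capacity per resource class using the standard placement
    formula: (total - reserved) * allocation_ratio."""
    return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
            for rc, v in inv.items()}

if __name__ == "__main__":
    for rc, cap in schedulable_capacity(INVENTORY).items():
        print(f"{rc}: {cap:g}")
    # With the values above: VCPU 192, MEMORY_MB 196078, DISK_GB 400.
    # The resource tracker's used_ram=1280MB earlier in the log is consistent
    # with the 512 MB reservation plus 4 instances x 192 MB of allocations.
```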
[ 1529.037429] env[62627]: DEBUG oslo_concurrency.lockutils [None req-07f41183-8151-49fa-8660-c5d7c8689b35 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Acquiring lock "77b68106-1abe-426b-89ce-3495776b9233" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1529.037691] env[62627]: DEBUG oslo_concurrency.lockutils [None req-07f41183-8151-49fa-8660-c5d7c8689b35 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Lock "77b68106-1abe-426b-89ce-3495776b9233" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1529.452823] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c8553dcb-e43c-4974-b9ea-6760cfa2823f tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1529.453213] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c8553dcb-e43c-4974-b9ea-6760cfa2823f tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1529.453353] env[62627]: DEBUG nova.objects.instance [None req-c8553dcb-e43c-4974-b9ea-6760cfa2823f tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Lazy-loading 'resources' on Instance uuid de1345ab-57f6-4d2c-918e-e9cdcdc16b50 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1529.540691] env[62627]: INFO nova.compute.manager [None req-07f41183-8151-49fa-8660-c5d7c8689b35 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Detaching volume b3d23526-ef45-4e03-a5a6-5df1c7626e46 [ 1529.569812] env[62627]: INFO nova.virt.block_device [None req-07f41183-8151-49fa-8660-c5d7c8689b35 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Attempting to driver detach volume b3d23526-ef45-4e03-a5a6-5df1c7626e46 from mountpoint /dev/sdb [ 1529.570054] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-07f41183-8151-49fa-8660-c5d7c8689b35 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Volume detach. 
Driver type: vmdk {{(pid=62627) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1529.570243] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-07f41183-8151-49fa-8660-c5d7c8689b35 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447862', 'volume_id': 'b3d23526-ef45-4e03-a5a6-5df1c7626e46', 'name': 'volume-b3d23526-ef45-4e03-a5a6-5df1c7626e46', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '77b68106-1abe-426b-89ce-3495776b9233', 'attached_at': '', 'detached_at': '', 'volume_id': 'b3d23526-ef45-4e03-a5a6-5df1c7626e46', 'serial': 'b3d23526-ef45-4e03-a5a6-5df1c7626e46'} {{(pid=62627) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1529.571128] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02e70538-60a4-4493-a730-fe21351fc5ac {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.592619] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1edb11e6-3ed2-4c30-ac6e-ca2946385541 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.599048] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a39421e-2075-40eb-919e-284b4ad1dd23 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.619763] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30d0a8c4-dc4d-46aa-847f-0eac3cc29303 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.634059] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-07f41183-8151-49fa-8660-c5d7c8689b35 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] The volume has not been displaced from its original location: [datastore2] volume-b3d23526-ef45-4e03-a5a6-5df1c7626e46/volume-b3d23526-ef45-4e03-a5a6-5df1c7626e46.vmdk. No consolidation needed. 
{{(pid=62627) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1529.639122] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-07f41183-8151-49fa-8660-c5d7c8689b35 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Reconfiguring VM instance instance-00000076 to detach disk 2001 {{(pid=62627) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1529.639383] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9603d4ff-369c-40ee-b602-f53389e507b3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.656243] env[62627]: DEBUG oslo_vmware.api [None req-07f41183-8151-49fa-8660-c5d7c8689b35 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Waiting for the task: (returnval){ [ 1529.656243] env[62627]: value = "task-2195628" [ 1529.656243] env[62627]: _type = "Task" [ 1529.656243] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1529.663397] env[62627]: DEBUG oslo_vmware.api [None req-07f41183-8151-49fa-8660-c5d7c8689b35 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195628, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.008919] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-731c11ee-cbe9-4a17-94e8-b971e14e229d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.016372] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27a6743e-2009-424c-814e-a1852ef3e965 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.044902] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc018f7a-07a4-4e47-ad87-b00f50fe331c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.051501] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-209441c4-89cc-4b59-95ec-e91848a580ff {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.064307] env[62627]: DEBUG nova.compute.provider_tree [None req-c8553dcb-e43c-4974-b9ea-6760cfa2823f tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1530.166013] env[62627]: DEBUG oslo_vmware.api [None req-07f41183-8151-49fa-8660-c5d7c8689b35 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195628, 'name': ReconfigVM_Task, 'duration_secs': 0.210055} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1530.166302] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-07f41183-8151-49fa-8660-c5d7c8689b35 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Reconfigured VM instance instance-00000076 to detach disk 2001 {{(pid=62627) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1530.170911] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ba3ef4de-5b4e-41f6-bb5d-464db7dd4cd2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.185240] env[62627]: DEBUG oslo_vmware.api [None req-07f41183-8151-49fa-8660-c5d7c8689b35 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Waiting for the task: (returnval){ [ 1530.185240] env[62627]: value = "task-2195629" [ 1530.185240] env[62627]: _type = "Task" [ 1530.185240] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1530.194135] env[62627]: DEBUG oslo_vmware.api [None req-07f41183-8151-49fa-8660-c5d7c8689b35 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195629, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.567718] env[62627]: DEBUG nova.scheduler.client.report [None req-c8553dcb-e43c-4974-b9ea-6760cfa2823f tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1530.694895] env[62627]: DEBUG oslo_vmware.api [None req-07f41183-8151-49fa-8660-c5d7c8689b35 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195629, 'name': ReconfigVM_Task, 'duration_secs': 0.139958} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1530.695217] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-07f41183-8151-49fa-8660-c5d7c8689b35 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447862', 'volume_id': 'b3d23526-ef45-4e03-a5a6-5df1c7626e46', 'name': 'volume-b3d23526-ef45-4e03-a5a6-5df1c7626e46', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '77b68106-1abe-426b-89ce-3495776b9233', 'attached_at': '', 'detached_at': '', 'volume_id': 'b3d23526-ef45-4e03-a5a6-5df1c7626e46', 'serial': 'b3d23526-ef45-4e03-a5a6-5df1c7626e46'} {{(pid=62627) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1531.072607] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c8553dcb-e43c-4974-b9ea-6760cfa2823f tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.619s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1531.090455] env[62627]: INFO nova.scheduler.client.report [None req-c8553dcb-e43c-4974-b9ea-6760cfa2823f tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Deleted allocations for instance de1345ab-57f6-4d2c-918e-e9cdcdc16b50 [ 1531.236370] env[62627]: DEBUG nova.objects.instance [None req-07f41183-8151-49fa-8660-c5d7c8689b35 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Lazy-loading 'flavor' on Instance uuid 77b68106-1abe-426b-89ce-3495776b9233 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1531.598587] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c8553dcb-e43c-4974-b9ea-6760cfa2823f tempest-ServerShowV247Test-250272930 tempest-ServerShowV247Test-250272930-project-member] Lock "de1345ab-57f6-4d2c-918e-e9cdcdc16b50" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.327s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1532.246535] env[62627]: DEBUG oslo_concurrency.lockutils [None req-07f41183-8151-49fa-8660-c5d7c8689b35 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Lock "77b68106-1abe-426b-89ce-3495776b9233" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.209s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1533.292224] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1ec5b00b-6ab9-4077-83c6-458d8b2905c9 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Acquiring lock "77b68106-1abe-426b-89ce-3495776b9233" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1533.292596] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1ec5b00b-6ab9-4077-83c6-458d8b2905c9 
tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Lock "77b68106-1abe-426b-89ce-3495776b9233" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1533.292596] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1ec5b00b-6ab9-4077-83c6-458d8b2905c9 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Acquiring lock "77b68106-1abe-426b-89ce-3495776b9233-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1533.293877] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1ec5b00b-6ab9-4077-83c6-458d8b2905c9 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Lock "77b68106-1abe-426b-89ce-3495776b9233-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1533.293877] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1ec5b00b-6ab9-4077-83c6-458d8b2905c9 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Lock "77b68106-1abe-426b-89ce-3495776b9233-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1533.295495] env[62627]: INFO nova.compute.manager [None req-1ec5b00b-6ab9-4077-83c6-458d8b2905c9 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Terminating instance [ 1533.801694] env[62627]: DEBUG nova.compute.manager [None req-1ec5b00b-6ab9-4077-83c6-458d8b2905c9 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Start destroying the instance on the hypervisor. 
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1533.802043] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-1ec5b00b-6ab9-4077-83c6-458d8b2905c9 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1533.802863] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b3039c1-53c9-44e1-956c-f11c9e23f0bb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.810514] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ec5b00b-6ab9-4077-83c6-458d8b2905c9 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1533.810742] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a9518409-18ef-4f15-a21a-67a73a726def {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.816850] env[62627]: DEBUG oslo_vmware.api [None req-1ec5b00b-6ab9-4077-83c6-458d8b2905c9 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Waiting for the task: (returnval){ [ 1533.816850] env[62627]: value = "task-2195630" [ 1533.816850] env[62627]: _type = "Task" [ 1533.816850] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1533.824289] env[62627]: DEBUG oslo_vmware.api [None req-1ec5b00b-6ab9-4077-83c6-458d8b2905c9 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195630, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1534.326814] env[62627]: DEBUG oslo_vmware.api [None req-1ec5b00b-6ab9-4077-83c6-458d8b2905c9 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195630, 'name': PowerOffVM_Task, 'duration_secs': 0.198748} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1534.327288] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ec5b00b-6ab9-4077-83c6-458d8b2905c9 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1534.327288] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-1ec5b00b-6ab9-4077-83c6-458d8b2905c9 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1534.327399] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9b804040-7993-4d26-8690-77c717d49ee5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.387504] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-1ec5b00b-6ab9-4077-83c6-458d8b2905c9 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1534.387798] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-1ec5b00b-6ab9-4077-83c6-458d8b2905c9 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1534.387925] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ec5b00b-6ab9-4077-83c6-458d8b2905c9 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Deleting the datastore file [datastore2] 77b68106-1abe-426b-89ce-3495776b9233 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1534.388199] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-77af66a9-ef13-4dbd-8634-ea465eee6014 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.394593] env[62627]: DEBUG oslo_vmware.api [None req-1ec5b00b-6ab9-4077-83c6-458d8b2905c9 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Waiting for the task: (returnval){ [ 1534.394593] env[62627]: value = "task-2195632" [ 1534.394593] env[62627]: _type = "Task" [ 1534.394593] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1534.401759] env[62627]: DEBUG oslo_vmware.api [None req-1ec5b00b-6ab9-4077-83c6-458d8b2905c9 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195632, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1534.903964] env[62627]: DEBUG oslo_vmware.api [None req-1ec5b00b-6ab9-4077-83c6-458d8b2905c9 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195632, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.126728} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1534.904332] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ec5b00b-6ab9-4077-83c6-458d8b2905c9 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1534.904619] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-1ec5b00b-6ab9-4077-83c6-458d8b2905c9 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1534.904813] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-1ec5b00b-6ab9-4077-83c6-458d8b2905c9 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1534.904988] env[62627]: INFO nova.compute.manager [None req-1ec5b00b-6ab9-4077-83c6-458d8b2905c9 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1534.905248] env[62627]: DEBUG oslo.service.loopingcall [None req-1ec5b00b-6ab9-4077-83c6-458d8b2905c9 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1534.905436] env[62627]: DEBUG nova.compute.manager [-] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1534.905526] env[62627]: DEBUG nova.network.neutron [-] [instance: 77b68106-1abe-426b-89ce-3495776b9233] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1535.378263] env[62627]: DEBUG nova.compute.manager [req-c22ebff6-92a0-44d9-b07a-033ded799ccc req-30cab441-6d8f-4329-9c9b-b9183ceedcf5 service nova] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Received event network-vif-deleted-54f0bc09-a4b0-47de-a805-27b17f146e00 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1535.378547] env[62627]: INFO nova.compute.manager [req-c22ebff6-92a0-44d9-b07a-033ded799ccc req-30cab441-6d8f-4329-9c9b-b9183ceedcf5 service nova] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Neutron deleted interface 54f0bc09-a4b0-47de-a805-27b17f146e00; detaching it from the instance and deleting it from the info cache [ 1535.378719] env[62627]: DEBUG nova.network.neutron [req-c22ebff6-92a0-44d9-b07a-033ded799ccc req-30cab441-6d8f-4329-9c9b-b9183ceedcf5 service nova] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1535.858273] env[62627]: DEBUG nova.network.neutron [-] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1535.881541] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f1bda3ce-771a-4c24-a950-82f099e4a964 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.891197] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5a0084c-a8ed-4364-a279-2421c8606472 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.915725] env[62627]: DEBUG nova.compute.manager [req-c22ebff6-92a0-44d9-b07a-033ded799ccc req-30cab441-6d8f-4329-9c9b-b9183ceedcf5 service nova] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Detach interface failed, port_id=54f0bc09-a4b0-47de-a805-27b17f146e00, reason: Instance 77b68106-1abe-426b-89ce-3495776b9233 could not be found. {{(pid=62627) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1536.361752] env[62627]: INFO nova.compute.manager [-] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Took 1.46 seconds to deallocate network for instance. 
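Every vCenter operation in the detach/terminate sequence above (ReconfigVM_Task, PowerOffVM_Task, DeleteDatastoreFile_Task, and later CreateVM_Task and the disk tasks) follows the same pattern visible in these entries: invoke the SOAP method, then poll the returned task, logging "progress is N%" while it runs and "completed successfully" with a duration_secs once it finishes. The snippet below is a minimal, hypothetical sketch of that polling loop for illustration only; it is not the oslo.vmware implementation, and fetch_task_info, TaskTimeout, and the poll_interval/timeout defaults are invented for the example.

```python
# Illustrative sketch only -- NOT the oslo.vmware code. It mimics the
# wait_for_task/_poll_task pattern shown in the log: poll a vCenter task,
# report its progress, and return once it completes.
import time


class TaskTimeout(Exception):
    """Raised when a task does not finish within the allowed time (hypothetical)."""


def wait_for_task(fetch_task_info, poll_interval=0.5, timeout=60.0):
    """Poll `fetch_task_info` until the task succeeds or errors.

    `fetch_task_info` is a hypothetical callable standing in for the
    property query oslo.vmware issues; it must return a dict with
    'state' in {'queued', 'running', 'success', 'error'} and may include
    a 'progress' percentage.
    """
    start = time.monotonic()
    while True:
        info = fetch_task_info()
        state = info.get('state')
        if state == 'success':
            # Mirrors the "completed successfully" entries with duration_secs.
            return {'duration_secs': time.monotonic() - start}
        if state == 'error':
            raise RuntimeError(info.get('error', 'task failed'))
        # Mirrors the "progress is N%" debug entries emitted while waiting.
        print(f"progress is {info.get('progress', 0)}%")
        if time.monotonic() - start > timeout:
            raise TaskTimeout('task did not complete in time')
        time.sleep(poll_interval)
```

The real code returns the task result rather than printing, but the control flow (poll, log progress, stop on success or error) is what the surrounding entries are recording.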
[ 1536.868318] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1ec5b00b-6ab9-4077-83c6-458d8b2905c9 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1536.868706] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1ec5b00b-6ab9-4077-83c6-458d8b2905c9 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1536.868752] env[62627]: DEBUG nova.objects.instance [None req-1ec5b00b-6ab9-4077-83c6-458d8b2905c9 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Lazy-loading 'resources' on Instance uuid 77b68106-1abe-426b-89ce-3495776b9233 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1537.388739] env[62627]: DEBUG nova.scheduler.client.report [None req-1ec5b00b-6ab9-4077-83c6-458d8b2905c9 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Refreshing inventories for resource provider c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1537.402210] env[62627]: DEBUG nova.scheduler.client.report [None req-1ec5b00b-6ab9-4077-83c6-458d8b2905c9 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Updating ProviderTree inventory for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1537.402449] env[62627]: DEBUG nova.compute.provider_tree [None req-1ec5b00b-6ab9-4077-83c6-458d8b2905c9 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Updating inventory in ProviderTree for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1537.412167] env[62627]: DEBUG nova.scheduler.client.report [None req-1ec5b00b-6ab9-4077-83c6-458d8b2905c9 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Refreshing aggregate associations for resource provider c17e7f32-68d8-4e2f-8979-15d701de02f7, aggregates: None {{(pid=62627) _refresh_associations 
/opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1537.428440] env[62627]: DEBUG nova.scheduler.client.report [None req-1ec5b00b-6ab9-4077-83c6-458d8b2905c9 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Refreshing trait associations for resource provider c17e7f32-68d8-4e2f-8979-15d701de02f7, traits: COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62627) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1537.464475] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6028752e-32d8-4031-9a96-8ecb789511e5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.472406] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e08591c-ab0f-42d4-ac03-1f2dd374bcc7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.506676] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-628b1f45-4dc8-4808-9750-7ab14bc9a0df {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.513945] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0e27843-1e26-46fc-b08c-8dd5e54ecd1b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.526786] env[62627]: DEBUG nova.compute.provider_tree [None req-1ec5b00b-6ab9-4077-83c6-458d8b2905c9 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1538.029703] env[62627]: DEBUG nova.scheduler.client.report [None req-1ec5b00b-6ab9-4077-83c6-458d8b2905c9 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1538.534060] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1ec5b00b-6ab9-4077-83c6-458d8b2905c9 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.665s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1538.551233] env[62627]: INFO nova.scheduler.client.report [None req-1ec5b00b-6ab9-4077-83c6-458d8b2905c9 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Deleted allocations for instance 
77b68106-1abe-426b-89ce-3495776b9233 [ 1539.058943] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1ec5b00b-6ab9-4077-83c6-458d8b2905c9 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Lock "77b68106-1abe-426b-89ce-3495776b9233" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.766s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1541.187817] env[62627]: DEBUG oslo_concurrency.lockutils [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Acquiring lock "98c33a76-60af-4449-b06b-58c7b0ed7cc9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1541.187817] env[62627]: DEBUG oslo_concurrency.lockutils [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Lock "98c33a76-60af-4449-b06b-58c7b0ed7cc9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1541.689903] env[62627]: DEBUG nova.compute.manager [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1542.214686] env[62627]: DEBUG oslo_concurrency.lockutils [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1542.215039] env[62627]: DEBUG oslo_concurrency.lockutils [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1542.216555] env[62627]: INFO nova.compute.claims [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1543.266036] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2abf21a3-a6ed-4328-94a0-ef6f5f48848b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.277837] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43599fe1-aac0-4e78-8fa7-cdb4be025d37 {{(pid=62627) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.326296] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fccc9d7-ac8f-4a25-8aa3-f479cd116840 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.337297] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d811559-18ed-42e6-9c3d-6dc86a62ccba {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.358266] env[62627]: DEBUG nova.compute.provider_tree [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1543.863981] env[62627]: DEBUG nova.scheduler.client.report [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1544.369292] env[62627]: DEBUG oslo_concurrency.lockutils [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.154s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1544.369767] env[62627]: DEBUG nova.compute.manager [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1544.875260] env[62627]: DEBUG nova.compute.utils [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1544.876659] env[62627]: DEBUG nova.compute.manager [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Allocating IP information in the background. 
{{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1544.876826] env[62627]: DEBUG nova.network.neutron [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1544.925898] env[62627]: DEBUG nova.policy [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9744688d4c8c41e6a3091a27f8f63104', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6095198e22f645698a7c566414eba47c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 1545.190744] env[62627]: DEBUG nova.network.neutron [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Successfully created port: 374cdd5a-3b25-49a6-a1a0-45d9d5a331c1 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1545.380879] env[62627]: DEBUG nova.compute.manager [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1546.392160] env[62627]: DEBUG nova.compute.manager [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Start spawning the instance on the hypervisor. 
{{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1546.417837] env[62627]: DEBUG nova.virt.hardware [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1546.418090] env[62627]: DEBUG nova.virt.hardware [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1546.418249] env[62627]: DEBUG nova.virt.hardware [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1546.418434] env[62627]: DEBUG nova.virt.hardware [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1546.418580] env[62627]: DEBUG nova.virt.hardware [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1546.418725] env[62627]: DEBUG nova.virt.hardware [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1546.418924] env[62627]: DEBUG nova.virt.hardware [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1546.419103] env[62627]: DEBUG nova.virt.hardware [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 1546.419269] env[62627]: DEBUG nova.virt.hardware [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1546.419430] env[62627]: DEBUG nova.virt.hardware [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1546.419597] env[62627]: DEBUG nova.virt.hardware [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1546.420500] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6c6a123-1b36-488b-bf3d-38ca5dbeda2f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.428640] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-417ce1f2-2050-4ff8-b57e-af7c9b11bb9f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.559081] env[62627]: DEBUG nova.compute.manager [req-c7a25c63-3929-497a-97c8-942ae784a779 req-c2c5cc1a-8fdb-4c66-ae60-a47d32a5b034 service nova] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Received event network-vif-plugged-374cdd5a-3b25-49a6-a1a0-45d9d5a331c1 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1546.559341] env[62627]: DEBUG oslo_concurrency.lockutils [req-c7a25c63-3929-497a-97c8-942ae784a779 req-c2c5cc1a-8fdb-4c66-ae60-a47d32a5b034 service nova] Acquiring lock "98c33a76-60af-4449-b06b-58c7b0ed7cc9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1546.559566] env[62627]: DEBUG oslo_concurrency.lockutils [req-c7a25c63-3929-497a-97c8-942ae784a779 req-c2c5cc1a-8fdb-4c66-ae60-a47d32a5b034 service nova] Lock "98c33a76-60af-4449-b06b-58c7b0ed7cc9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1546.559716] env[62627]: DEBUG oslo_concurrency.lockutils [req-c7a25c63-3929-497a-97c8-942ae784a779 req-c2c5cc1a-8fdb-4c66-ae60-a47d32a5b034 service nova] Lock "98c33a76-60af-4449-b06b-58c7b0ed7cc9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1546.559887] env[62627]: DEBUG nova.compute.manager [req-c7a25c63-3929-497a-97c8-942ae784a779 req-c2c5cc1a-8fdb-4c66-ae60-a47d32a5b034 service nova] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] No waiting events found dispatching network-vif-plugged-374cdd5a-3b25-49a6-a1a0-45d9d5a331c1 {{(pid=62627) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 1546.560191] env[62627]: WARNING nova.compute.manager [req-c7a25c63-3929-497a-97c8-942ae784a779 req-c2c5cc1a-8fdb-4c66-ae60-a47d32a5b034 service nova] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Received unexpected event network-vif-plugged-374cdd5a-3b25-49a6-a1a0-45d9d5a331c1 for instance with vm_state building and task_state spawning. [ 1546.643701] env[62627]: DEBUG nova.network.neutron [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Successfully updated port: 374cdd5a-3b25-49a6-a1a0-45d9d5a331c1 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1547.146240] env[62627]: DEBUG oslo_concurrency.lockutils [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Acquiring lock "refresh_cache-98c33a76-60af-4449-b06b-58c7b0ed7cc9" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1547.146414] env[62627]: DEBUG oslo_concurrency.lockutils [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Acquired lock "refresh_cache-98c33a76-60af-4449-b06b-58c7b0ed7cc9" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1547.146575] env[62627]: DEBUG nova.network.neutron [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1547.678503] env[62627]: DEBUG nova.network.neutron [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Instance cache missing network info. 
{{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1547.995804] env[62627]: DEBUG nova.network.neutron [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Updating instance_info_cache with network_info: [{"id": "374cdd5a-3b25-49a6-a1a0-45d9d5a331c1", "address": "fa:16:3e:5f:eb:ca", "network": {"id": "eb8841de-7dc6-4afa-ad35-670f3baa1354", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-428698998-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6095198e22f645698a7c566414eba47c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cd38fdec-d092-4a84-ab41-685f6dbb4f29", "external-id": "nsx-vlan-transportzone-622", "segmentation_id": 622, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap374cdd5a-3b", "ovs_interfaceid": "374cdd5a-3b25-49a6-a1a0-45d9d5a331c1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1548.499018] env[62627]: DEBUG oslo_concurrency.lockutils [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Releasing lock "refresh_cache-98c33a76-60af-4449-b06b-58c7b0ed7cc9" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1548.499381] env[62627]: DEBUG nova.compute.manager [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Instance network_info: |[{"id": "374cdd5a-3b25-49a6-a1a0-45d9d5a331c1", "address": "fa:16:3e:5f:eb:ca", "network": {"id": "eb8841de-7dc6-4afa-ad35-670f3baa1354", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-428698998-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6095198e22f645698a7c566414eba47c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cd38fdec-d092-4a84-ab41-685f6dbb4f29", "external-id": "nsx-vlan-transportzone-622", "segmentation_id": 622, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap374cdd5a-3b", "ovs_interfaceid": "374cdd5a-3b25-49a6-a1a0-45d9d5a331c1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1548.499812] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5f:eb:ca', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cd38fdec-d092-4a84-ab41-685f6dbb4f29', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '374cdd5a-3b25-49a6-a1a0-45d9d5a331c1', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1548.507248] env[62627]: DEBUG oslo.service.loopingcall [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1548.507453] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1548.507666] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-474d5150-9bb1-4062-889b-e0c1323516b4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.528486] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1548.528486] env[62627]: value = "task-2195633" [ 1548.528486] env[62627]: _type = "Task" [ 1548.528486] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1548.536109] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195633, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.583720] env[62627]: DEBUG nova.compute.manager [req-2297df5e-2be6-4bb9-a9ca-8da4c5437d4d req-444559b8-b275-4d1c-8076-76bbe9f31720 service nova] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Received event network-changed-374cdd5a-3b25-49a6-a1a0-45d9d5a331c1 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1548.583938] env[62627]: DEBUG nova.compute.manager [req-2297df5e-2be6-4bb9-a9ca-8da4c5437d4d req-444559b8-b275-4d1c-8076-76bbe9f31720 service nova] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Refreshing instance network info cache due to event network-changed-374cdd5a-3b25-49a6-a1a0-45d9d5a331c1. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1548.584206] env[62627]: DEBUG oslo_concurrency.lockutils [req-2297df5e-2be6-4bb9-a9ca-8da4c5437d4d req-444559b8-b275-4d1c-8076-76bbe9f31720 service nova] Acquiring lock "refresh_cache-98c33a76-60af-4449-b06b-58c7b0ed7cc9" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1548.584381] env[62627]: DEBUG oslo_concurrency.lockutils [req-2297df5e-2be6-4bb9-a9ca-8da4c5437d4d req-444559b8-b275-4d1c-8076-76bbe9f31720 service nova] Acquired lock "refresh_cache-98c33a76-60af-4449-b06b-58c7b0ed7cc9" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1548.584571] env[62627]: DEBUG nova.network.neutron [req-2297df5e-2be6-4bb9-a9ca-8da4c5437d4d req-444559b8-b275-4d1c-8076-76bbe9f31720 service nova] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Refreshing network info cache for port 374cdd5a-3b25-49a6-a1a0-45d9d5a331c1 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1549.038439] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195633, 'name': CreateVM_Task, 'duration_secs': 0.309584} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1549.038867] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1549.039298] env[62627]: DEBUG oslo_concurrency.lockutils [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1549.039459] env[62627]: DEBUG oslo_concurrency.lockutils [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1549.039776] env[62627]: DEBUG oslo_concurrency.lockutils [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1549.040027] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6c49a5af-9642-46e3-935b-a81c9fa77f18 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.044254] env[62627]: DEBUG oslo_vmware.api [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Waiting for the task: (returnval){ [ 1549.044254] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]521e32f3-7e51-dfc9-858b-3545b867d94e" [ 1549.044254] env[62627]: _type = "Task" [ 1549.044254] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1549.051468] env[62627]: DEBUG oslo_vmware.api [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]521e32f3-7e51-dfc9-858b-3545b867d94e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.274157] env[62627]: DEBUG nova.network.neutron [req-2297df5e-2be6-4bb9-a9ca-8da4c5437d4d req-444559b8-b275-4d1c-8076-76bbe9f31720 service nova] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Updated VIF entry in instance network info cache for port 374cdd5a-3b25-49a6-a1a0-45d9d5a331c1. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1549.274509] env[62627]: DEBUG nova.network.neutron [req-2297df5e-2be6-4bb9-a9ca-8da4c5437d4d req-444559b8-b275-4d1c-8076-76bbe9f31720 service nova] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Updating instance_info_cache with network_info: [{"id": "374cdd5a-3b25-49a6-a1a0-45d9d5a331c1", "address": "fa:16:3e:5f:eb:ca", "network": {"id": "eb8841de-7dc6-4afa-ad35-670f3baa1354", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-428698998-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6095198e22f645698a7c566414eba47c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cd38fdec-d092-4a84-ab41-685f6dbb4f29", "external-id": "nsx-vlan-transportzone-622", "segmentation_id": 622, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap374cdd5a-3b", "ovs_interfaceid": "374cdd5a-3b25-49a6-a1a0-45d9d5a331c1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1549.554591] env[62627]: DEBUG oslo_vmware.api [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]521e32f3-7e51-dfc9-858b-3545b867d94e, 'name': SearchDatastore_Task, 'duration_secs': 0.011411} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1549.554906] env[62627]: DEBUG oslo_concurrency.lockutils [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1549.555096] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1549.555366] env[62627]: DEBUG oslo_concurrency.lockutils [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1549.555520] env[62627]: DEBUG oslo_concurrency.lockutils [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1549.555700] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1549.555945] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8a68febc-03e4-4284-a041-7dd4d519d181 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.563640] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1549.563854] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1549.564439] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2b75c75f-e826-4ee1-a90f-dee65a2e10b9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.569115] env[62627]: DEBUG oslo_vmware.api [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Waiting for the task: (returnval){ [ 1549.569115] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52df4d8f-fe74-c12e-0db4-5abcfe6da816" [ 1549.569115] env[62627]: _type = "Task" [ 1549.569115] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1549.576040] env[62627]: DEBUG oslo_vmware.api [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52df4d8f-fe74-c12e-0db4-5abcfe6da816, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.777428] env[62627]: DEBUG oslo_concurrency.lockutils [req-2297df5e-2be6-4bb9-a9ca-8da4c5437d4d req-444559b8-b275-4d1c-8076-76bbe9f31720 service nova] Releasing lock "refresh_cache-98c33a76-60af-4449-b06b-58c7b0ed7cc9" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1550.079162] env[62627]: DEBUG oslo_vmware.api [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52df4d8f-fe74-c12e-0db4-5abcfe6da816, 'name': SearchDatastore_Task, 'duration_secs': 0.007787} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1550.079898] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c808f175-7f78-4aec-8a05-3523ddf82420 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.085567] env[62627]: DEBUG oslo_vmware.api [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Waiting for the task: (returnval){ [ 1550.085567] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52cd6a72-0963-91a6-3e3e-c1e5688eab79" [ 1550.085567] env[62627]: _type = "Task" [ 1550.085567] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1550.092441] env[62627]: DEBUG oslo_vmware.api [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52cd6a72-0963-91a6-3e3e-c1e5688eab79, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1550.596246] env[62627]: DEBUG oslo_vmware.api [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52cd6a72-0963-91a6-3e3e-c1e5688eab79, 'name': SearchDatastore_Task, 'duration_secs': 0.009029} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1550.596511] env[62627]: DEBUG oslo_concurrency.lockutils [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1550.596751] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 98c33a76-60af-4449-b06b-58c7b0ed7cc9/98c33a76-60af-4449-b06b-58c7b0ed7cc9.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1550.597016] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-819f4074-59e1-4259-b4e6-3e80c90438de {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.603306] env[62627]: DEBUG oslo_vmware.api [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Waiting for the task: (returnval){ [ 1550.603306] env[62627]: value = "task-2195634" [ 1550.603306] env[62627]: _type = "Task" [ 1550.603306] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1550.610213] env[62627]: DEBUG oslo_vmware.api [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195634, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1551.113685] env[62627]: DEBUG oslo_vmware.api [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195634, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.418456} completed successfully. 
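Each vCenter call above (SearchDatastore_Task, CopyVirtualDisk_Task) returns a task handle that the driver polls through wait_for_task until it reports success; that is what the repeated "progress is 0%" entries followed by "completed successfully" correspond to. Here is a stripped-down sketch of such a polling loop, with a hypothetical get_task_info callable standing in for the real property query:

import time


class TaskFailed(Exception):
    pass


def wait_for_task(get_task_info, poll_interval=0.5, timeout=300.0):
    """Poll a vCenter-style task until it finishes.

    get_task_info() is assumed to return a dict like
    {'state': 'running'|'success'|'error', 'progress': int, 'error': str}.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()
        if info['state'] == 'success':
            return info
        if info['state'] == 'error':
            raise TaskFailed(info.get('error', 'task failed'))
        # Still queued or running; sleep and poll again, as the
        # _poll_task entries above do.
        time.sleep(poll_interval)
    raise TaskFailed('timed out waiting for task')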
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1551.114118] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] 98c33a76-60af-4449-b06b-58c7b0ed7cc9/98c33a76-60af-4449-b06b-58c7b0ed7cc9.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1551.114197] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1551.114416] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-235bf070-11bf-4720-bba4-9cf5a53fc89d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.120188] env[62627]: DEBUG oslo_vmware.api [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Waiting for the task: (returnval){ [ 1551.120188] env[62627]: value = "task-2195635" [ 1551.120188] env[62627]: _type = "Task" [ 1551.120188] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1551.127139] env[62627]: DEBUG oslo_vmware.api [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195635, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1551.630148] env[62627]: DEBUG oslo_vmware.api [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195635, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.10223} completed successfully. 
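"Extending root virtual disk to 1048576" is the flavor root size expressed in KB (1 GiB here); the driver only issues ExtendVirtualDisk_Task when the requested size exceeds what the copied image already provides. A small sketch of that size calculation, with units following the log (flavor size in GiB, the extend call in KB) and extend_cb as a hypothetical stand-in for the vCenter call:

KIB_PER_GIB = 1024 * 1024


def maybe_extend_root_disk(flavor_root_gb, current_size_kb, extend_cb):
    """Grow the root VMDK to the flavor size if it is smaller.

    Returns the final size in KB. extend_cb(new_size_kb) stands in for
    the ExtendVirtualDisk_Task invocation seen above.
    """
    requested_kb = flavor_root_gb * KIB_PER_GIB
    if requested_kb > current_size_kb:
        extend_cb(requested_kb)
        return requested_kb
    return current_size_kb


# A 1 GiB flavor root disk -> 1048576 KB, matching the log line above.
assert maybe_extend_root_disk(1, 0, lambda kb: None) == 1048576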
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1551.630417] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1551.631175] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa93270b-96bf-46d0-8c9c-c8f70487dc4c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.651651] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Reconfiguring VM instance instance-0000007c to attach disk [datastore2] 98c33a76-60af-4449-b06b-58c7b0ed7cc9/98c33a76-60af-4449-b06b-58c7b0ed7cc9.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1551.651874] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-49679583-0b01-4717-8eb6-14fc63c5d5a9 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.671632] env[62627]: DEBUG oslo_vmware.api [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Waiting for the task: (returnval){ [ 1551.671632] env[62627]: value = "task-2195636" [ 1551.671632] env[62627]: _type = "Task" [ 1551.671632] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1551.678839] env[62627]: DEBUG oslo_vmware.api [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195636, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1552.181588] env[62627]: DEBUG oslo_vmware.api [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195636, 'name': ReconfigVM_Task, 'duration_secs': 0.254263} completed successfully. 
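Attaching the copied VMDK is done with a single ReconfigVM_Task whose spec adds one virtual disk device ("attach disk [datastore2] ... with type sparse"). The sketch below builds such a device-change spec as plain dicts; the field names imitate the vSphere reconfigure spec for illustration only and are not real SOAP objects.

def build_attach_disk_spec(vmdk_path, disk_type='sparse',
                           controller_key=1000, unit_number=0):
    """Build a reconfigure spec that adds one existing VMDK to a VM.

    This is an illustrative plain-dict version of the deviceChange
    entry a ReconfigVM_Task like the one above would carry.
    """
    return {
        'deviceChange': [{
            'operation': 'add',
            'device': {
                'type': 'VirtualDisk',
                'backing': {
                    'fileName': vmdk_path,  # e.g. "[datastore2] <uuid>/<uuid>.vmdk"
                    'diskMode': 'persistent',
                    'thinProvisioned': disk_type == 'thin',
                },
                'controllerKey': controller_key,
                'unitNumber': unit_number,
            },
        }],
    }


spec = build_attach_disk_spec(
    '[datastore2] 98c33a76-60af-4449-b06b-58c7b0ed7cc9/'
    '98c33a76-60af-4449-b06b-58c7b0ed7cc9.vmdk')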
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1552.182115] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Reconfigured VM instance instance-0000007c to attach disk [datastore2] 98c33a76-60af-4449-b06b-58c7b0ed7cc9/98c33a76-60af-4449-b06b-58c7b0ed7cc9.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1552.182774] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9bfb435c-2be2-4ab3-826c-fa33a77dd206 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.188924] env[62627]: DEBUG oslo_vmware.api [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Waiting for the task: (returnval){ [ 1552.188924] env[62627]: value = "task-2195637" [ 1552.188924] env[62627]: _type = "Task" [ 1552.188924] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1552.197747] env[62627]: DEBUG oslo_vmware.api [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195637, 'name': Rename_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1552.698947] env[62627]: DEBUG oslo_vmware.api [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195637, 'name': Rename_Task, 'duration_secs': 0.142374} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1552.699261] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1552.699509] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-27b1dc2a-a054-4891-bb5d-1b51f7cb6e71 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.706098] env[62627]: DEBUG oslo_vmware.api [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Waiting for the task: (returnval){ [ 1552.706098] env[62627]: value = "task-2195638" [ 1552.706098] env[62627]: _type = "Task" [ 1552.706098] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1552.713090] env[62627]: DEBUG oslo_vmware.api [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195638, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.215855] env[62627]: DEBUG oslo_vmware.api [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195638, 'name': PowerOnVM_Task, 'duration_secs': 0.420904} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1553.216311] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1553.216311] env[62627]: INFO nova.compute.manager [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Took 6.82 seconds to spawn the instance on the hypervisor. [ 1553.216470] env[62627]: DEBUG nova.compute.manager [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1553.217258] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e14a6e71-af8e-4bef-8815-7bde23c179b2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.735487] env[62627]: INFO nova.compute.manager [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Took 11.54 seconds to build instance. [ 1554.234484] env[62627]: DEBUG nova.compute.manager [req-a0450d54-d7a3-42cd-a755-eecf18f220cf req-2e1348f3-9921-49d6-9384-272a2473a1a0 service nova] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Received event network-changed-374cdd5a-3b25-49a6-a1a0-45d9d5a331c1 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1554.234740] env[62627]: DEBUG nova.compute.manager [req-a0450d54-d7a3-42cd-a755-eecf18f220cf req-2e1348f3-9921-49d6-9384-272a2473a1a0 service nova] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Refreshing instance network info cache due to event network-changed-374cdd5a-3b25-49a6-a1a0-45d9d5a331c1. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1554.234886] env[62627]: DEBUG oslo_concurrency.lockutils [req-a0450d54-d7a3-42cd-a755-eecf18f220cf req-2e1348f3-9921-49d6-9384-272a2473a1a0 service nova] Acquiring lock "refresh_cache-98c33a76-60af-4449-b06b-58c7b0ed7cc9" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1554.235039] env[62627]: DEBUG oslo_concurrency.lockutils [req-a0450d54-d7a3-42cd-a755-eecf18f220cf req-2e1348f3-9921-49d6-9384-272a2473a1a0 service nova] Acquired lock "refresh_cache-98c33a76-60af-4449-b06b-58c7b0ed7cc9" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1554.235208] env[62627]: DEBUG nova.network.neutron [req-a0450d54-d7a3-42cd-a755-eecf18f220cf req-2e1348f3-9921-49d6-9384-272a2473a1a0 service nova] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Refreshing network info cache for port 374cdd5a-3b25-49a6-a1a0-45d9d5a331c1 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1554.237704] env[62627]: DEBUG oslo_concurrency.lockutils [None req-56d06bd3-dec7-48a7-bd88-c04e31896ea6 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Lock "98c33a76-60af-4449-b06b-58c7b0ed7cc9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.051s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1554.919662] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0e6d1b7f-59c3-458c-8483-c7da56241196 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Acquiring lock "54b2cc24-6652-4cc4-93cc-b059f8a9be86" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1554.919951] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0e6d1b7f-59c3-458c-8483-c7da56241196 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Lock "54b2cc24-6652-4cc4-93cc-b059f8a9be86" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1554.957377] env[62627]: DEBUG nova.network.neutron [req-a0450d54-d7a3-42cd-a755-eecf18f220cf req-2e1348f3-9921-49d6-9384-272a2473a1a0 service nova] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Updated VIF entry in instance network info cache for port 374cdd5a-3b25-49a6-a1a0-45d9d5a331c1. 
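The network-changed event from Neutron is handled by re-reading the port and rewriting the instance's network info cache, all while holding the "refresh_cache-<uuid>" lock so concurrent refreshes cannot interleave. A minimal sketch of that dispatch-and-refresh pattern follows; the cache store and refresh_port_cb callback are hypothetical stand-ins for what nova.compute.manager and nova.network.neutron actually do.

import threading
from collections import defaultdict

_cache_locks = defaultdict(threading.Lock)
_info_cache = {}


def handle_external_event(event, refresh_port_cb):
    """Handle a 'network-changed-<port>' style event for an instance.

    refresh_port_cb(port_id) is assumed to return fresh VIF info for
    the port named in the event.
    """
    if not event['name'].startswith('network-changed'):
        return
    instance_uuid = event['instance_uuid']
    port_id = event['tag']

    # Mirrors the Acquiring/Acquired/Releasing sequence on lock
    # "refresh_cache-<uuid>" in the log above.
    with _cache_locks[instance_uuid]:
        vif = refresh_port_cb(port_id)
        cache = _info_cache.setdefault(instance_uuid, {})
        cache[port_id] = vif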
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1554.957740] env[62627]: DEBUG nova.network.neutron [req-a0450d54-d7a3-42cd-a755-eecf18f220cf req-2e1348f3-9921-49d6-9384-272a2473a1a0 service nova] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Updating instance_info_cache with network_info: [{"id": "374cdd5a-3b25-49a6-a1a0-45d9d5a331c1", "address": "fa:16:3e:5f:eb:ca", "network": {"id": "eb8841de-7dc6-4afa-ad35-670f3baa1354", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-428698998-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6095198e22f645698a7c566414eba47c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cd38fdec-d092-4a84-ab41-685f6dbb4f29", "external-id": "nsx-vlan-transportzone-622", "segmentation_id": 622, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap374cdd5a-3b", "ovs_interfaceid": "374cdd5a-3b25-49a6-a1a0-45d9d5a331c1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1555.426442] env[62627]: INFO nova.compute.manager [None req-0e6d1b7f-59c3-458c-8483-c7da56241196 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Detaching volume 34258b00-8c9e-4ec9-8c4d-c99d0551604a [ 1555.456341] env[62627]: INFO nova.virt.block_device [None req-0e6d1b7f-59c3-458c-8483-c7da56241196 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Attempting to driver detach volume 34258b00-8c9e-4ec9-8c4d-c99d0551604a from mountpoint /dev/sdb [ 1555.456623] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e6d1b7f-59c3-458c-8483-c7da56241196 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Volume detach. 
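The instance_info_cache entry above is a list of VIF dicts, with the addresses nested several levels down. A short helper showing how to pull the fixed and floating IPs out of a network_info structure shaped like the one logged (written against plain dicts, not Nova's NetworkInfo model):

def list_addresses(network_info):
    """Return [(fixed_ip, [floating_ips])] for every VIF in network_info.

    network_info is the JSON-like structure seen in the
    "Updating instance_info_cache with network_info" entry above.
    """
    result = []
    for vif in network_info:
        for subnet in vif['network']['subnets']:
            for ip in subnet['ips']:
                floats = [f['address'] for f in ip.get('floating_ips', [])]
                result.append((ip['address'], floats))
    return result


vif = {
    'id': '374cdd5a-3b25-49a6-a1a0-45d9d5a331c1',
    'network': {'subnets': [{'ips': [{'address': '192.168.128.12',
                                      'floating_ips': [{'address': '10.180.180.216'}]}]}]},
}
assert list_addresses([vif]) == [('192.168.128.12', ['10.180.180.216'])]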
Driver type: vmdk {{(pid=62627) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1555.456820] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e6d1b7f-59c3-458c-8483-c7da56241196 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447870', 'volume_id': '34258b00-8c9e-4ec9-8c4d-c99d0551604a', 'name': 'volume-34258b00-8c9e-4ec9-8c4d-c99d0551604a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '54b2cc24-6652-4cc4-93cc-b059f8a9be86', 'attached_at': '', 'detached_at': '', 'volume_id': '34258b00-8c9e-4ec9-8c4d-c99d0551604a', 'serial': '34258b00-8c9e-4ec9-8c4d-c99d0551604a'} {{(pid=62627) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1555.457693] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57cc22d6-8f29-4365-b73b-9c1c85a270b3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.460625] env[62627]: DEBUG oslo_concurrency.lockutils [req-a0450d54-d7a3-42cd-a755-eecf18f220cf req-2e1348f3-9921-49d6-9384-272a2473a1a0 service nova] Releasing lock "refresh_cache-98c33a76-60af-4449-b06b-58c7b0ed7cc9" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1555.480036] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec811b93-d942-4e94-a60b-37cce0648f88 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.487863] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acddf48d-3745-47c1-9f4f-d5a87c517a23 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.507562] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d61e9bb2-2808-48d9-ace3-a84df1b2af02 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.521186] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e6d1b7f-59c3-458c-8483-c7da56241196 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] The volume has not been displaced from its original location: [datastore2] volume-34258b00-8c9e-4ec9-8c4d-c99d0551604a/volume-34258b00-8c9e-4ec9-8c4d-c99d0551604a.vmdk. No consolidation needed. 
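The _detach_volume_vmdk entry shows the connection_info the driver works from: the shadow VM ('vm-447870'), the volume UUID, and the backing name. Before reconfiguring, it checks whether the backing file was displaced from its expected "[datastore] volume-<id>/volume-<id>.vmdk" location, which would require consolidation first. A sketch of that path check in pure string handling; consolidate_cb is a hypothetical placeholder for the consolidation step:

def expected_volume_path(datastore, volume_name):
    """Canonical backing path for a Cinder-backed VMDK volume."""
    return '[%s] %s/%s.vmdk' % (datastore, volume_name, volume_name)


def check_displacement(current_path, datastore, volume_name, consolidate_cb):
    """Trigger consolidation only if the backing moved, as in the log."""
    expected = expected_volume_path(datastore, volume_name)
    if current_path == expected:
        # "The volume has not been displaced from its original
        # location ... No consolidation needed."
        return False
    consolidate_cb(current_path, expected)
    return True


name = 'volume-34258b00-8c9e-4ec9-8c4d-c99d0551604a'
assert not check_displacement(
    '[datastore2] %s/%s.vmdk' % (name, name), 'datastore2', name,
    lambda cur, exp: None)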
{{(pid=62627) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1555.526590] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e6d1b7f-59c3-458c-8483-c7da56241196 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Reconfiguring VM instance instance-00000079 to detach disk 2001 {{(pid=62627) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1555.526708] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0ff2419c-ecf4-4476-8da0-dce583b74bc4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.543695] env[62627]: DEBUG oslo_vmware.api [None req-0e6d1b7f-59c3-458c-8483-c7da56241196 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Waiting for the task: (returnval){ [ 1555.543695] env[62627]: value = "task-2195639" [ 1555.543695] env[62627]: _type = "Task" [ 1555.543695] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1555.551037] env[62627]: DEBUG oslo_vmware.api [None req-0e6d1b7f-59c3-458c-8483-c7da56241196 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': task-2195639, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.053582] env[62627]: DEBUG oslo_vmware.api [None req-0e6d1b7f-59c3-458c-8483-c7da56241196 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': task-2195639, 'name': ReconfigVM_Task, 'duration_secs': 0.237846} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1556.053875] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e6d1b7f-59c3-458c-8483-c7da56241196 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Reconfigured VM instance instance-00000079 to detach disk 2001 {{(pid=62627) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1556.058436] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-962e79d8-18c1-4c5b-864f-62d95bfa28aa {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.072144] env[62627]: DEBUG oslo_vmware.api [None req-0e6d1b7f-59c3-458c-8483-c7da56241196 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Waiting for the task: (returnval){ [ 1556.072144] env[62627]: value = "task-2195640" [ 1556.072144] env[62627]: _type = "Task" [ 1556.072144] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1556.080925] env[62627]: DEBUG oslo_vmware.api [None req-0e6d1b7f-59c3-458c-8483-c7da56241196 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': task-2195640, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.296949] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1556.297236] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1556.581937] env[62627]: DEBUG oslo_vmware.api [None req-0e6d1b7f-59c3-458c-8483-c7da56241196 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': task-2195640, 'name': ReconfigVM_Task, 'duration_secs': 0.13422} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1556.582300] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e6d1b7f-59c3-458c-8483-c7da56241196 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447870', 'volume_id': '34258b00-8c9e-4ec9-8c4d-c99d0551604a', 'name': 'volume-34258b00-8c9e-4ec9-8c4d-c99d0551604a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '54b2cc24-6652-4cc4-93cc-b059f8a9be86', 'attached_at': '', 'detached_at': '', 'volume_id': '34258b00-8c9e-4ec9-8c4d-c99d0551604a', 'serial': '34258b00-8c9e-4ec9-8c4d-c99d0551604a'} {{(pid=62627) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1557.120981] env[62627]: DEBUG nova.objects.instance [None req-0e6d1b7f-59c3-458c-8483-c7da56241196 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Lazy-loading 'flavor' on Instance uuid 54b2cc24-6652-4cc4-93cc-b059f8a9be86 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1557.798723] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1558.086497] env[62627]: DEBUG oslo_concurrency.lockutils [None req-224f53b5-e18f-48a4-8c04-31238422d6a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Acquiring lock "54b2cc24-6652-4cc4-93cc-b059f8a9be86" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1558.127708] env[62627]: DEBUG oslo_concurrency.lockutils [None req-0e6d1b7f-59c3-458c-8483-c7da56241196 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Lock "54b2cc24-6652-4cc4-93cc-b059f8a9be86" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.208s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1558.129469] env[62627]: DEBUG 
oslo_concurrency.lockutils [None req-224f53b5-e18f-48a4-8c04-31238422d6a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Lock "54b2cc24-6652-4cc4-93cc-b059f8a9be86" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.043s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1558.129679] env[62627]: DEBUG nova.compute.manager [None req-224f53b5-e18f-48a4-8c04-31238422d6a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1558.130661] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47329301-42ee-4a01-b8cc-a50d620c2730 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.138876] env[62627]: DEBUG nova.compute.manager [None req-224f53b5-e18f-48a4-8c04-31238422d6a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62627) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1558.139409] env[62627]: DEBUG nova.objects.instance [None req-224f53b5-e18f-48a4-8c04-31238422d6a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Lazy-loading 'flavor' on Instance uuid 54b2cc24-6652-4cc4-93cc-b059f8a9be86 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1559.146201] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-224f53b5-e18f-48a4-8c04-31238422d6a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1559.146610] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ab20f523-027f-4f2e-9efe-cbdf7f7556ab {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.154856] env[62627]: DEBUG oslo_vmware.api [None req-224f53b5-e18f-48a4-8c04-31238422d6a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Waiting for the task: (returnval){ [ 1559.154856] env[62627]: value = "task-2195641" [ 1559.154856] env[62627]: _type = "Task" [ 1559.154856] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1559.163123] env[62627]: DEBUG oslo_vmware.api [None req-224f53b5-e18f-48a4-8c04-31238422d6a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': task-2195641, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.665064] env[62627]: DEBUG oslo_vmware.api [None req-224f53b5-e18f-48a4-8c04-31238422d6a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': task-2195641, 'name': PowerOffVM_Task, 'duration_secs': 0.16296} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1559.665338] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-224f53b5-e18f-48a4-8c04-31238422d6a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1559.665551] env[62627]: DEBUG nova.compute.manager [None req-224f53b5-e18f-48a4-8c04-31238422d6a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1559.666419] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cc32aa6-466d-4941-a550-edab69d331d3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.177364] env[62627]: DEBUG oslo_concurrency.lockutils [None req-224f53b5-e18f-48a4-8c04-31238422d6a8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Lock "54b2cc24-6652-4cc4-93cc-b059f8a9be86" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.048s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1560.296733] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1560.477335] env[62627]: DEBUG nova.objects.instance [None req-2680e146-b698-4b4a-8bb7-0537369eff0d tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Lazy-loading 'flavor' on Instance uuid 54b2cc24-6652-4cc4-93cc-b059f8a9be86 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1560.800258] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1560.800577] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1560.800706] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62627) inner 
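The stop flow above checks the current power state before and after issuing PowerOffVM_Task (the log records vm_state, task_state, the DB power_state and the VM power_state), and only powers off when the hypervisor reports the VM as running. A compact sketch of that guard; the constants follow Nova's power_state convention (1 = RUNNING, 4 = SHUTDOWN) and the callbacks are hypothetical stand-ins for the vCenter calls and the instance save.

RUNNING = 1    # nova.compute.power_state.RUNNING
SHUTDOWN = 4   # nova.compute.power_state.SHUTDOWN


def do_stop_instance(get_power_state, power_off_cb, save_cb):
    """Power an instance off only if the hypervisor says it is running.

    get_power_state() queries the VM (the PropertyCollector call in the
    log), power_off_cb() issues PowerOffVM_Task, save_cb(state) persists
    the new power_state to the instance record.
    """
    current = get_power_state()
    if current == RUNNING:
        power_off_cb()
        current = get_power_state()
    save_cb(current)
    return current


class FakeVM:
    state = RUNNING

    def power_state(self):
        return self.state

    def power_off(self):
        self.state = SHUTDOWN


vm = FakeVM()
assert do_stop_instance(vm.power_state, vm.power_off, lambda s: None) == SHUTDOWN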
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1560.800866] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62627) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1560.801849] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4c9db38-9b6c-4acb-a1f8-b0cd35f26104 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.810496] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7853f6c-b4d6-4084-9aa5-8126d69ee453 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.825807] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eef8937-deae-410e-9f90-ff6d9dafd01a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.832878] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f06950c5-d791-402c-abdf-a210b74a3436 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.861542] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180677MB free_disk=177GB free_vcpus=48 pci_devices=None {{(pid=62627) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1560.861768] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1560.861900] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1560.982388] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2680e146-b698-4b4a-8bb7-0537369eff0d tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Acquiring lock "refresh_cache-54b2cc24-6652-4cc4-93cc-b059f8a9be86" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1560.982593] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2680e146-b698-4b4a-8bb7-0537369eff0d tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Acquired lock "refresh_cache-54b2cc24-6652-4cc4-93cc-b059f8a9be86" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1560.982728] env[62627]: DEBUG nova.network.neutron [None req-2680e146-b698-4b4a-8bb7-0537369eff0d tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 
54b2cc24-6652-4cc4-93cc-b059f8a9be86] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1560.982896] env[62627]: DEBUG nova.objects.instance [None req-2680e146-b698-4b4a-8bb7-0537369eff0d tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Lazy-loading 'info_cache' on Instance uuid 54b2cc24-6652-4cc4-93cc-b059f8a9be86 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1561.485679] env[62627]: DEBUG nova.objects.base [None req-2680e146-b698-4b4a-8bb7-0537369eff0d tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Object Instance<54b2cc24-6652-4cc4-93cc-b059f8a9be86> lazy-loaded attributes: flavor,info_cache {{(pid=62627) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1561.886614] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 54b2cc24-6652-4cc4-93cc-b059f8a9be86 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1561.886781] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 98c33a76-60af-4449-b06b-58c7b0ed7cc9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1561.886939] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=62627) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1561.887089] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=896MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=62627) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1561.921963] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e70c1011-0c15-4756-a52b-baf7d1de46ed {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.930029] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c1d542a-abda-4c6d-b42a-33ac2eb08a8f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.960421] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb6d6a39-fca7-4e75-be0d-4bd85180e18b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.967949] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecb9d46f-12af-4d91-9575-b866a4fca49d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.981009] env[62627]: DEBUG nova.compute.provider_tree [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None 
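update_available_resource first builds a "hypervisor resource view" from vCenter and then a "final resource view" by summing what the tracked instances consume; the figures above (used_ram=896MB, used_disk=2GB, used_vcpus=2) follow from the two active instances at 192MB/1GB/1VCPU each plus the 512MB of reserved host memory. A small sketch of that accounting, with illustrative field names rather than the tracker's real attributes:

def final_resource_view(instances, phys_ram_mb, phys_disk_gb, total_vcpus,
                        reserved_ram_mb=512, reserved_disk_gb=0):
    """Aggregate per-instance usage the way the tracker logs it.

    instances is a list of dicts with 'memory_mb', 'root_gb', 'vcpus'.
    With the two instances above (192MB/1GB/1VCPU each) and 512MB of
    reserved host memory this reproduces used_ram=896MB, used_disk=2GB,
    used_vcpus=2.
    """
    used_ram = reserved_ram_mb + sum(i['memory_mb'] for i in instances)
    used_disk = reserved_disk_gb + sum(i['root_gb'] for i in instances)
    used_vcpus = sum(i['vcpus'] for i in instances)
    return {
        'phys_ram': phys_ram_mb, 'used_ram': used_ram,
        'phys_disk': phys_disk_gb, 'used_disk': used_disk,
        'total_vcpus': total_vcpus, 'used_vcpus': used_vcpus,
    }


view = final_resource_view(
    [{'memory_mb': 192, 'root_gb': 1, 'vcpus': 1}] * 2,
    phys_ram_mb=196590, phys_disk_gb=200, total_vcpus=48)
assert (view['used_ram'], view['used_disk'], view['used_vcpus']) == (896, 2, 2)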
None] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1562.231094] env[62627]: DEBUG nova.network.neutron [None req-2680e146-b698-4b4a-8bb7-0537369eff0d tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Updating instance_info_cache with network_info: [{"id": "b7ce94b7-c449-47bc-b068-3f8eca9f22f0", "address": "fa:16:3e:80:8b:50", "network": {"id": "da92dc57-06a7-46e5-a51b-1f0a8d7373a7", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-952102388-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.220", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4b88e3a0d8ee4e3682bd312872dcbc0f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4a8f5363-be3a-4f92-9ccf-33bb0c8113b3", "external-id": "nsx-vlan-transportzone-944", "segmentation_id": 944, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb7ce94b7-c4", "ovs_interfaceid": "b7ce94b7-c449-47bc-b068-3f8eca9f22f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1562.484281] env[62627]: DEBUG nova.scheduler.client.report [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1562.734088] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2680e146-b698-4b4a-8bb7-0537369eff0d tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Releasing lock "refresh_cache-54b2cc24-6652-4cc4-93cc-b059f8a9be86" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1562.988525] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62627) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1562.988770] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.127s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1562.988956] env[62627]: 
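The inventory dict logged a little further down is what gets reported to Placement; the schedulable capacity for each resource class works out to (total - reserved) * allocation_ratio, so this node can hold allocations worth 192 VCPUs, 196078 MB of RAM and 400 GB of disk. A sketch of that capacity calculation and of the "Inventory has not changed" comparison, using plain dicts shaped like the logged structure:

def capacity(inventory):
    """Schedulable capacity per resource class: (total - reserved) * ratio."""
    return {rc: (inv['total'] - inv['reserved']) * inv['allocation_ratio']
            for rc, inv in inventory.items()}


def inventory_changed(old, new):
    """Mirror of the 'Inventory has not changed' check: the update to
    Placement is skipped when the two dicts compare equal."""
    return old != new


inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
             'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                  'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177,
                'step_size': 1, 'allocation_ratio': 1.0},
}
caps = capacity(inventory)
assert caps['VCPU'] == 192.0 and caps['MEMORY_MB'] == 196078.0
assert not inventory_changed(inventory, dict(inventory))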
DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1562.989110] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Cleaning up deleted instances with incomplete migration {{(pid=62627) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11783}} [ 1563.740777] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-2680e146-b698-4b4a-8bb7-0537369eff0d tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1563.740777] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6b971f66-57dd-4cbe-90c6-f1a5e030119d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.747870] env[62627]: DEBUG oslo_vmware.api [None req-2680e146-b698-4b4a-8bb7-0537369eff0d tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Waiting for the task: (returnval){ [ 1563.747870] env[62627]: value = "task-2195642" [ 1563.747870] env[62627]: _type = "Task" [ 1563.747870] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1563.755650] env[62627]: DEBUG oslo_vmware.api [None req-2680e146-b698-4b4a-8bb7-0537369eff0d tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': task-2195642, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.257931] env[62627]: DEBUG oslo_vmware.api [None req-2680e146-b698-4b4a-8bb7-0537369eff0d tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': task-2195642, 'name': PowerOnVM_Task, 'duration_secs': 0.37626} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1564.258215] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-2680e146-b698-4b4a-8bb7-0537369eff0d tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1564.258416] env[62627]: DEBUG nova.compute.manager [None req-2680e146-b698-4b4a-8bb7-0537369eff0d tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1564.259184] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ff56ef4-e7b4-414d-af92-72b32dd5d75b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.491688] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1564.491875] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Starting heal instance info cache {{(pid=62627) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 1565.023235] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Acquiring lock "refresh_cache-54b2cc24-6652-4cc4-93cc-b059f8a9be86" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1565.023511] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Acquired lock "refresh_cache-54b2cc24-6652-4cc4-93cc-b059f8a9be86" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1565.023606] env[62627]: DEBUG nova.network.neutron [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Forcefully refreshing network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1566.241510] env[62627]: DEBUG nova.network.neutron [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Updating instance_info_cache with network_info: [{"id": "b7ce94b7-c449-47bc-b068-3f8eca9f22f0", "address": "fa:16:3e:80:8b:50", "network": {"id": "da92dc57-06a7-46e5-a51b-1f0a8d7373a7", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-952102388-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.220", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4b88e3a0d8ee4e3682bd312872dcbc0f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"4a8f5363-be3a-4f92-9ccf-33bb0c8113b3", "external-id": "nsx-vlan-transportzone-944", "segmentation_id": 944, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb7ce94b7-c4", "ovs_interfaceid": "b7ce94b7-c449-47bc-b068-3f8eca9f22f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1566.744292] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Releasing lock "refresh_cache-54b2cc24-6652-4cc4-93cc-b059f8a9be86" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1566.744496] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Updated the network info_cache for instance {{(pid=62627) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10516}} [ 1566.744699] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1566.744863] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1566.745022] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1566.745171] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1566.745315] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1566.745445] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62627) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 1567.297087] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1567.297468] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Cleaning up deleted instances {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11745}} [ 1567.806143] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] There are 26 instances to clean {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11754}} [ 1567.806330] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: a66936c9-e164-4431-b5e4-40cfbdb1ca8e] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1568.309627] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: de1345ab-57f6-4d2c-918e-e9cdcdc16b50] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1568.813849] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: cdff04c1-ec74-4afa-9f8c-7e1acbe9f336] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1569.317748] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 6a53e7a7-ec4b-4a97-98f7-8d686df3178c] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1569.821305] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 77b68106-1abe-426b-89ce-3495776b9233] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1570.324771] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: c90dcd2f-c718-41b2-912f-00fa0fe69b68] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1570.828450] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: cabc73d8-0304-4a7a-8101-068986ac9c3e] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1571.331484] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 6c4ebb14-c3a3-485c-bbb3-405a8b7f92db] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1571.835085] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 3b1e8b81-7d18-498a-b35e-8ab1c87374c8] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1572.339114] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 
8c170084-a7f8-4774-8f9d-7874103b0f4f] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1572.842665] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 16ed0637-d24d-488d-b9ba-dd5a49a7b2a2] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1573.345851] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 66f280de-f963-4f68-818b-ab0534be4c55] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1573.850049] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 680afe04-ddc8-4e14-9260-509dded09a0b] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1574.352600] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 02b9bb2d-5752-4c09-9661-5966c2ac1bad] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1574.856066] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 00bb8c04-3c0b-48ff-8127-13d3e15cb86c] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1575.359495] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: cfdb0b96-0df5-4b71-b1c4-5f0f32f2155e] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1575.862511] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 7e340c99-8d15-4696-98d5-a952f5d2aeee] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1576.365550] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 5b001b38-d121-4e03-910e-54647536faf5] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1576.869088] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: f195662c-9493-469a-ba36-d712f9944b28] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1577.372637] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 1593ca50-bc97-464a-86ca-17995db15758] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1577.876691] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 1109aae0-62d2-4c55-8c68-f224f3de8b55] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1578.379702] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: b68042fd-4955-41e4-b14f-1f8268ace377] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes 
/opt/stack/nova/nova/compute/manager.py:11758}} [ 1578.882685] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: c7e3246d-ffe7-4d81-a2ec-c5878649ed6b] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1579.386316] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 3001147e-7be3-4f72-b330-8f99bf7cf3ea] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1579.889813] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: e9d33611-e373-4303-937d-b43d893e51f5] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1580.393057] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: 6f9b5217-6ab8-4cf5-a2ab-ad0d9dd95b1a] Instance has had 0 of 5 cleanup attempts {{(pid=62627) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1592.186061] env[62627]: DEBUG oslo_concurrency.lockutils [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Acquiring lock "98c33a76-60af-4449-b06b-58c7b0ed7cc9" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1592.186463] env[62627]: DEBUG oslo_concurrency.lockutils [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Lock "98c33a76-60af-4449-b06b-58c7b0ed7cc9" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1592.186463] env[62627]: INFO nova.compute.manager [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Shelving [ 1593.196055] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1593.196055] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5ec4fb8c-e17e-4d98-ba63-3c3ad7b86293 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.203983] env[62627]: DEBUG oslo_vmware.api [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Waiting for the task: (returnval){ [ 1593.203983] env[62627]: value = "task-2195643" [ 1593.203983] env[62627]: _type = "Task" [ 1593.203983] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1593.211795] env[62627]: DEBUG oslo_vmware.api [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195643, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1593.713882] env[62627]: DEBUG oslo_vmware.api [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195643, 'name': PowerOffVM_Task, 'duration_secs': 0.176463} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1593.714138] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1593.714939] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0f89ce2-abd4-49f1-8891-572407badbfa {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.732273] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5392de04-e4d5-4c9c-abdb-81949298d7b3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.242142] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Creating Snapshot of the VM instance {{(pid=62627) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1594.242599] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-ef8fb92c-bb78-4f77-9e51-8d849187f370 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.250452] env[62627]: DEBUG oslo_vmware.api [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Waiting for the task: (returnval){ [ 1594.250452] env[62627]: value = "task-2195644" [ 1594.250452] env[62627]: _type = "Task" [ 1594.250452] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1594.258258] env[62627]: DEBUG oslo_vmware.api [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195644, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1594.759853] env[62627]: DEBUG oslo_vmware.api [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195644, 'name': CreateSnapshot_Task, 'duration_secs': 0.41185} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1594.760217] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Created Snapshot of the VM instance {{(pid=62627) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1594.760975] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17b9f9d1-7b21-47a3-a465-38bf1ee9fbce {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.029997] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c1834cbe-4459-46e4-a461-420a674925cd tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Acquiring lock "54b2cc24-6652-4cc4-93cc-b059f8a9be86" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1595.030307] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c1834cbe-4459-46e4-a461-420a674925cd tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Lock "54b2cc24-6652-4cc4-93cc-b059f8a9be86" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1595.030504] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c1834cbe-4459-46e4-a461-420a674925cd tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Acquiring lock "54b2cc24-6652-4cc4-93cc-b059f8a9be86-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1595.030686] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c1834cbe-4459-46e4-a461-420a674925cd tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Lock "54b2cc24-6652-4cc4-93cc-b059f8a9be86-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1595.030852] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c1834cbe-4459-46e4-a461-420a674925cd tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Lock "54b2cc24-6652-4cc4-93cc-b059f8a9be86-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1595.033169] env[62627]: INFO nova.compute.manager [None 
req-c1834cbe-4459-46e4-a461-420a674925cd tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Terminating instance [ 1595.278497] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Creating linked-clone VM from snapshot {{(pid=62627) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1595.278932] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-93186d69-118f-4c04-817c-ba58719646dc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.287584] env[62627]: DEBUG oslo_vmware.api [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Waiting for the task: (returnval){ [ 1595.287584] env[62627]: value = "task-2195645" [ 1595.287584] env[62627]: _type = "Task" [ 1595.287584] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1595.295173] env[62627]: DEBUG oslo_vmware.api [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195645, 'name': CloneVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1595.536819] env[62627]: DEBUG nova.compute.manager [None req-c1834cbe-4459-46e4-a461-420a674925cd tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Start destroying the instance on the hypervisor. 
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1595.537102] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c1834cbe-4459-46e4-a461-420a674925cd tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1595.537988] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-307cb0d0-28fd-4ff6-a86b-0f27f8c86262 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.545714] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1834cbe-4459-46e4-a461-420a674925cd tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1595.545953] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d185cc1e-bd56-4951-b6bc-d63d1fc6b9e2 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.552424] env[62627]: DEBUG oslo_vmware.api [None req-c1834cbe-4459-46e4-a461-420a674925cd tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Waiting for the task: (returnval){ [ 1595.552424] env[62627]: value = "task-2195646" [ 1595.552424] env[62627]: _type = "Task" [ 1595.552424] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1595.560389] env[62627]: DEBUG oslo_vmware.api [None req-c1834cbe-4459-46e4-a461-420a674925cd tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': task-2195646, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1595.797454] env[62627]: DEBUG oslo_vmware.api [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195645, 'name': CloneVM_Task} progress is 94%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1596.070949] env[62627]: DEBUG oslo_vmware.api [None req-c1834cbe-4459-46e4-a461-420a674925cd tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': task-2195646, 'name': PowerOffVM_Task, 'duration_secs': 0.206637} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1596.071300] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1834cbe-4459-46e4-a461-420a674925cd tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1596.071392] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c1834cbe-4459-46e4-a461-420a674925cd tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1596.071747] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d22e61a5-62c7-4921-93a8-419ebc618fcb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.131179] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c1834cbe-4459-46e4-a461-420a674925cd tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1596.131431] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c1834cbe-4459-46e4-a461-420a674925cd tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1596.131581] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1834cbe-4459-46e4-a461-420a674925cd tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Deleting the datastore file [datastore2] 54b2cc24-6652-4cc4-93cc-b059f8a9be86 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1596.131879] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7d41efbf-893c-427f-8b88-12f7cb31facc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.138165] env[62627]: DEBUG oslo_vmware.api [None req-c1834cbe-4459-46e4-a461-420a674925cd tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Waiting for the task: (returnval){ [ 1596.138165] env[62627]: value = "task-2195648" [ 1596.138165] env[62627]: _type = "Task" [ 1596.138165] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1596.145520] env[62627]: DEBUG oslo_vmware.api [None req-c1834cbe-4459-46e4-a461-420a674925cd tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': task-2195648, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1596.298524] env[62627]: DEBUG oslo_vmware.api [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195645, 'name': CloneVM_Task} progress is 95%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1596.648304] env[62627]: DEBUG oslo_vmware.api [None req-c1834cbe-4459-46e4-a461-420a674925cd tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': task-2195648, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.207076} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1596.648650] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1834cbe-4459-46e4-a461-420a674925cd tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1596.648920] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c1834cbe-4459-46e4-a461-420a674925cd tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1596.649195] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c1834cbe-4459-46e4-a461-420a674925cd tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1596.649456] env[62627]: INFO nova.compute.manager [None req-c1834cbe-4459-46e4-a461-420a674925cd tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1596.649794] env[62627]: DEBUG oslo.service.loopingcall [None req-c1834cbe-4459-46e4-a461-420a674925cd tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1596.650061] env[62627]: DEBUG nova.compute.manager [-] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1596.650222] env[62627]: DEBUG nova.network.neutron [-] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1596.798298] env[62627]: DEBUG oslo_vmware.api [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195645, 'name': CloneVM_Task, 'duration_secs': 1.160684} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1596.798558] env[62627]: INFO nova.virt.vmwareapi.vmops [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Created linked-clone VM from snapshot [ 1596.799286] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34aecd9f-4e11-49c5-890a-ac1afd145818 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.806348] env[62627]: DEBUG nova.virt.vmwareapi.images [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Uploading image d6d73dc4-0e36-445e-9ab0-e1d798cfb7e2 {{(pid=62627) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1596.835693] env[62627]: DEBUG oslo_vmware.rw_handles [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1596.835693] env[62627]: value = "vm-447874" [ 1596.835693] env[62627]: _type = "VirtualMachine" [ 1596.835693] env[62627]: }. {{(pid=62627) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1596.835989] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-60882b3a-499d-483d-85e9-1cb19f736058 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.842206] env[62627]: DEBUG oslo_vmware.rw_handles [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Lease: (returnval){ [ 1596.842206] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52839e8b-e0cc-73ea-4b85-753ede368a85" [ 1596.842206] env[62627]: _type = "HttpNfcLease" [ 1596.842206] env[62627]: } obtained for exporting VM: (result){ [ 1596.842206] env[62627]: value = "vm-447874" [ 1596.842206] env[62627]: _type = "VirtualMachine" [ 1596.842206] env[62627]: }. {{(pid=62627) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1596.842602] env[62627]: DEBUG oslo_vmware.api [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Waiting for the lease: (returnval){ [ 1596.842602] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52839e8b-e0cc-73ea-4b85-753ede368a85" [ 1596.842602] env[62627]: _type = "HttpNfcLease" [ 1596.842602] env[62627]: } to be ready. {{(pid=62627) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1596.850046] env[62627]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1596.850046] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52839e8b-e0cc-73ea-4b85-753ede368a85" [ 1596.850046] env[62627]: _type = "HttpNfcLease" [ 1596.850046] env[62627]: } is initializing. 
{{(pid=62627) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1597.110767] env[62627]: DEBUG nova.compute.manager [req-6812cfeb-4efd-41a4-968f-1af1ad05e084 req-3d265d6e-28ed-4436-b4c4-ea4c1bbbb2d8 service nova] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Received event network-vif-deleted-b7ce94b7-c449-47bc-b068-3f8eca9f22f0 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1597.110992] env[62627]: INFO nova.compute.manager [req-6812cfeb-4efd-41a4-968f-1af1ad05e084 req-3d265d6e-28ed-4436-b4c4-ea4c1bbbb2d8 service nova] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Neutron deleted interface b7ce94b7-c449-47bc-b068-3f8eca9f22f0; detaching it from the instance and deleting it from the info cache [ 1597.111180] env[62627]: DEBUG nova.network.neutron [req-6812cfeb-4efd-41a4-968f-1af1ad05e084 req-3d265d6e-28ed-4436-b4c4-ea4c1bbbb2d8 service nova] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1597.354011] env[62627]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1597.354011] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52839e8b-e0cc-73ea-4b85-753ede368a85" [ 1597.354011] env[62627]: _type = "HttpNfcLease" [ 1597.354011] env[62627]: } is ready. {{(pid=62627) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1597.354011] env[62627]: DEBUG oslo_vmware.rw_handles [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1597.354011] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52839e8b-e0cc-73ea-4b85-753ede368a85" [ 1597.354011] env[62627]: _type = "HttpNfcLease" [ 1597.354011] env[62627]: }. {{(pid=62627) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1597.354721] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4edd21c1-711b-4de4-8e28-a1e92b6c3576 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.364374] env[62627]: DEBUG oslo_vmware.rw_handles [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/529087d4-62a2-6e3b-a6a7-8dbae0a462dd/disk-0.vmdk from lease info. {{(pid=62627) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1597.364580] env[62627]: DEBUG oslo_vmware.rw_handles [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/529087d4-62a2-6e3b-a6a7-8dbae0a462dd/disk-0.vmdk for reading. 
{{(pid=62627) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1597.475989] env[62627]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-b8e69fd1-7996-4187-9e42-8f7ccd45a0fa {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.596042] env[62627]: DEBUG nova.network.neutron [-] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1597.613456] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c2ae08f0-e3b2-4653-9a04-77b06220a607 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.622888] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cb42beb-bce1-4dfc-b982-3e7c5c7d9246 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.647221] env[62627]: DEBUG nova.compute.manager [req-6812cfeb-4efd-41a4-968f-1af1ad05e084 req-3d265d6e-28ed-4436-b4c4-ea4c1bbbb2d8 service nova] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Detach interface failed, port_id=b7ce94b7-c449-47bc-b068-3f8eca9f22f0, reason: Instance 54b2cc24-6652-4cc4-93cc-b059f8a9be86 could not be found. {{(pid=62627) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1598.097796] env[62627]: INFO nova.compute.manager [-] [instance: 54b2cc24-6652-4cc4-93cc-b059f8a9be86] Took 1.45 seconds to deallocate network for instance. [ 1598.605361] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c1834cbe-4459-46e4-a461-420a674925cd tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1598.605747] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c1834cbe-4459-46e4-a461-420a674925cd tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1598.605854] env[62627]: DEBUG nova.objects.instance [None req-c1834cbe-4459-46e4-a461-420a674925cd tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Lazy-loading 'resources' on Instance uuid 54b2cc24-6652-4cc4-93cc-b059f8a9be86 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1599.210687] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d05d372e-7674-42cf-8dc5-819d4ed84c64 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.219035] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f1e6cd8-485b-4afb-8cbe-b8952e749c2c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.250481] env[62627]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b11ef507-53e1-4475-81a4-cdc37a447033 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.258452] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b23f9c54-e8e5-4a70-914a-2e4f64990bd4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.271969] env[62627]: DEBUG nova.compute.provider_tree [None req-c1834cbe-4459-46e4-a461-420a674925cd tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1599.775894] env[62627]: DEBUG nova.scheduler.client.report [None req-c1834cbe-4459-46e4-a461-420a674925cd tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1600.280717] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c1834cbe-4459-46e4-a461-420a674925cd tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.675s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1600.299960] env[62627]: INFO nova.scheduler.client.report [None req-c1834cbe-4459-46e4-a461-420a674925cd tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Deleted allocations for instance 54b2cc24-6652-4cc4-93cc-b059f8a9be86 [ 1600.807832] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c1834cbe-4459-46e4-a461-420a674925cd tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Lock "54b2cc24-6652-4cc4-93cc-b059f8a9be86" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.777s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1603.278547] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Acquiring lock "ef9f511a-3b2c-4871-b52a-4959e7d3390b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1603.278900] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Lock "ef9f511a-3b2c-4871-b52a-4959e7d3390b" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1603.781735] env[62627]: DEBUG nova.compute.manager [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Starting instance... {{(pid=62627) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1604.306652] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1604.307027] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1604.309292] env[62627]: INFO nova.compute.claims [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1605.358352] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24e9693f-937b-4999-997b-81d91e7da3c8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.370698] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09e6d4ce-bdc4-48dc-bf74-9e400e9b3a58 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.400922] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-139314be-c5ce-4c82-9419-6f2a36eba931 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.408628] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38078876-cc50-4b78-8597-b11d8b1c9e38 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.422659] env[62627]: DEBUG nova.compute.provider_tree [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1605.926177] env[62627]: DEBUG nova.scheduler.client.report [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 
1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1606.432504] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.125s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1606.433051] env[62627]: DEBUG nova.compute.manager [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Start building networks asynchronously for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1606.695168] env[62627]: DEBUG oslo_vmware.rw_handles [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/529087d4-62a2-6e3b-a6a7-8dbae0a462dd/disk-0.vmdk. {{(pid=62627) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1606.696112] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee4522fa-ca77-48b7-a7fd-d15c9b894bfa {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.702359] env[62627]: DEBUG oslo_vmware.rw_handles [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/529087d4-62a2-6e3b-a6a7-8dbae0a462dd/disk-0.vmdk is in state: ready. {{(pid=62627) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1606.702521] env[62627]: ERROR oslo_vmware.rw_handles [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/529087d4-62a2-6e3b-a6a7-8dbae0a462dd/disk-0.vmdk due to incomplete transfer. [ 1606.702728] env[62627]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-4af30e52-9045-412f-ab58-7520ef2b3f7f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.709540] env[62627]: DEBUG oslo_vmware.rw_handles [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/529087d4-62a2-6e3b-a6a7-8dbae0a462dd/disk-0.vmdk. 
{{(pid=62627) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1606.709729] env[62627]: DEBUG nova.virt.vmwareapi.images [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Uploaded image d6d73dc4-0e36-445e-9ab0-e1d798cfb7e2 to the Glance image server {{(pid=62627) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1606.712136] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Destroying the VM {{(pid=62627) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1606.712354] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-e7fde89e-0617-4c43-9f23-936144ead173 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.718329] env[62627]: DEBUG oslo_vmware.api [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Waiting for the task: (returnval){ [ 1606.718329] env[62627]: value = "task-2195650" [ 1606.718329] env[62627]: _type = "Task" [ 1606.718329] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1606.725228] env[62627]: DEBUG oslo_vmware.api [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195650, 'name': Destroy_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1606.938433] env[62627]: DEBUG nova.compute.utils [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1606.939885] env[62627]: DEBUG nova.compute.manager [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Allocating IP information in the background. 
{{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1606.940106] env[62627]: DEBUG nova.network.neutron [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] allocate_for_instance() {{(pid=62627) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1606.989942] env[62627]: DEBUG nova.policy [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5a247050ec104a58901c4f14acc41542', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4b88e3a0d8ee4e3682bd312872dcbc0f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62627) authorize /opt/stack/nova/nova/policy.py:192}} [ 1607.229423] env[62627]: DEBUG oslo_vmware.api [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195650, 'name': Destroy_Task, 'duration_secs': 0.286351} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1607.229775] env[62627]: INFO nova.virt.vmwareapi.vm_util [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Destroyed the VM [ 1607.230062] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Deleting Snapshot of the VM instance {{(pid=62627) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1607.230330] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-ac24d5ba-e357-447f-8310-f36531bb5044 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.237258] env[62627]: DEBUG oslo_vmware.api [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Waiting for the task: (returnval){ [ 1607.237258] env[62627]: value = "task-2195651" [ 1607.237258] env[62627]: _type = "Task" [ 1607.237258] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1607.245073] env[62627]: DEBUG oslo_vmware.api [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195651, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1607.281851] env[62627]: DEBUG nova.network.neutron [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Successfully created port: 82de6dd8-1d57-45fc-9263-40c89eeacaa3 {{(pid=62627) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1607.443345] env[62627]: DEBUG nova.compute.manager [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Start building block device mappings for instance. {{(pid=62627) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1607.747115] env[62627]: DEBUG oslo_vmware.api [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195651, 'name': RemoveSnapshot_Task, 'duration_secs': 0.348763} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1607.747386] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Deleted Snapshot of the VM instance {{(pid=62627) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1607.747662] env[62627]: DEBUG nova.compute.manager [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1607.748423] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86b5349f-8cba-4840-bcd7-0c0f14bb6244 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.260092] env[62627]: INFO nova.compute.manager [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Shelve offloading [ 1608.452698] env[62627]: DEBUG nova.compute.manager [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Start spawning the instance on the hypervisor. 
{{(pid=62627) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1608.480035] env[62627]: DEBUG nova.virt.hardware [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-01-30T20:28:30Z,direct_url=,disk_format='vmdk',id=d5497cf1-16e8-478a-a834-e9289b1751b7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ccb0cafa1a904d578ec4998b386bebf7',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-01-30T20:28:30Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1608.480297] env[62627]: DEBUG nova.virt.hardware [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1608.480455] env[62627]: DEBUG nova.virt.hardware [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1608.480636] env[62627]: DEBUG nova.virt.hardware [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1608.480784] env[62627]: DEBUG nova.virt.hardware [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1608.480934] env[62627]: DEBUG nova.virt.hardware [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1608.481155] env[62627]: DEBUG nova.virt.hardware [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1608.481315] env[62627]: DEBUG nova.virt.hardware [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1608.481478] env[62627]: DEBUG nova.virt.hardware [None 
req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1608.481639] env[62627]: DEBUG nova.virt.hardware [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1608.481846] env[62627]: DEBUG nova.virt.hardware [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1608.482710] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d588ba4-c171-413f-9cc1-21391fde24da {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.490579] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c560bd7-160d-474a-83d3-175ce835c2da {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.655542] env[62627]: DEBUG nova.compute.manager [req-16ab2fb6-64bb-430a-b477-1a1f943bad45 req-08a638fc-88c0-4722-9e1a-52694e30cce3 service nova] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Received event network-vif-plugged-82de6dd8-1d57-45fc-9263-40c89eeacaa3 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1608.655824] env[62627]: DEBUG oslo_concurrency.lockutils [req-16ab2fb6-64bb-430a-b477-1a1f943bad45 req-08a638fc-88c0-4722-9e1a-52694e30cce3 service nova] Acquiring lock "ef9f511a-3b2c-4871-b52a-4959e7d3390b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1608.656033] env[62627]: DEBUG oslo_concurrency.lockutils [req-16ab2fb6-64bb-430a-b477-1a1f943bad45 req-08a638fc-88c0-4722-9e1a-52694e30cce3 service nova] Lock "ef9f511a-3b2c-4871-b52a-4959e7d3390b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1608.656239] env[62627]: DEBUG oslo_concurrency.lockutils [req-16ab2fb6-64bb-430a-b477-1a1f943bad45 req-08a638fc-88c0-4722-9e1a-52694e30cce3 service nova] Lock "ef9f511a-3b2c-4871-b52a-4959e7d3390b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1608.656472] env[62627]: DEBUG nova.compute.manager [req-16ab2fb6-64bb-430a-b477-1a1f943bad45 req-08a638fc-88c0-4722-9e1a-52694e30cce3 service nova] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] No waiting events found dispatching network-vif-plugged-82de6dd8-1d57-45fc-9263-40c89eeacaa3 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1608.656616] env[62627]: WARNING nova.compute.manager [req-16ab2fb6-64bb-430a-b477-1a1f943bad45 
req-08a638fc-88c0-4722-9e1a-52694e30cce3 service nova] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Received unexpected event network-vif-plugged-82de6dd8-1d57-45fc-9263-40c89eeacaa3 for instance with vm_state building and task_state spawning. [ 1608.738242] env[62627]: DEBUG nova.network.neutron [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Successfully updated port: 82de6dd8-1d57-45fc-9263-40c89eeacaa3 {{(pid=62627) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1608.763125] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1608.763405] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-68c8a37c-ee7c-41c4-ad15-2d47fc7da1be {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.771202] env[62627]: DEBUG oslo_vmware.api [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Waiting for the task: (returnval){ [ 1608.771202] env[62627]: value = "task-2195652" [ 1608.771202] env[62627]: _type = "Task" [ 1608.771202] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1608.780903] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] VM already powered off {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1608.781111] env[62627]: DEBUG nova.compute.manager [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1608.781814] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a732056b-40b5-4481-bc82-4bc499052ea8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.787213] env[62627]: DEBUG oslo_concurrency.lockutils [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Acquiring lock "refresh_cache-98c33a76-60af-4449-b06b-58c7b0ed7cc9" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1608.787374] env[62627]: DEBUG oslo_concurrency.lockutils [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Acquired lock "refresh_cache-98c33a76-60af-4449-b06b-58c7b0ed7cc9" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1608.787538] env[62627]: DEBUG 
nova.network.neutron [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1609.241377] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Acquiring lock "refresh_cache-ef9f511a-3b2c-4871-b52a-4959e7d3390b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1609.241523] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Acquired lock "refresh_cache-ef9f511a-3b2c-4871-b52a-4959e7d3390b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1609.241669] env[62627]: DEBUG nova.network.neutron [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1609.486821] env[62627]: DEBUG nova.network.neutron [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Updating instance_info_cache with network_info: [{"id": "374cdd5a-3b25-49a6-a1a0-45d9d5a331c1", "address": "fa:16:3e:5f:eb:ca", "network": {"id": "eb8841de-7dc6-4afa-ad35-670f3baa1354", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-428698998-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6095198e22f645698a7c566414eba47c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cd38fdec-d092-4a84-ab41-685f6dbb4f29", "external-id": "nsx-vlan-transportzone-622", "segmentation_id": 622, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap374cdd5a-3b", "ovs_interfaceid": "374cdd5a-3b25-49a6-a1a0-45d9d5a331c1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1609.771954] env[62627]: DEBUG nova.network.neutron [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Instance cache missing network info. 
{{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1609.892402] env[62627]: DEBUG nova.network.neutron [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Updating instance_info_cache with network_info: [{"id": "82de6dd8-1d57-45fc-9263-40c89eeacaa3", "address": "fa:16:3e:56:28:96", "network": {"id": "da92dc57-06a7-46e5-a51b-1f0a8d7373a7", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-952102388-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4b88e3a0d8ee4e3682bd312872dcbc0f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4a8f5363-be3a-4f92-9ccf-33bb0c8113b3", "external-id": "nsx-vlan-transportzone-944", "segmentation_id": 944, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap82de6dd8-1d", "ovs_interfaceid": "82de6dd8-1d57-45fc-9263-40c89eeacaa3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1609.989555] env[62627]: DEBUG oslo_concurrency.lockutils [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Releasing lock "refresh_cache-98c33a76-60af-4449-b06b-58c7b0ed7cc9" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1610.285945] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1610.286874] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5422290-e233-4672-b6bc-2944ed51c841 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.295091] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1610.295332] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-46e9300a-41c0-4d39-9bd2-5b9b746d9164 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.355344] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 
98c33a76-60af-4449-b06b-58c7b0ed7cc9] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1610.355538] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1610.355720] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Deleting the datastore file [datastore2] 98c33a76-60af-4449-b06b-58c7b0ed7cc9 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1610.355980] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9ff8d730-fd25-4430-a519-066d0d20e1e5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.362396] env[62627]: DEBUG oslo_vmware.api [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Waiting for the task: (returnval){ [ 1610.362396] env[62627]: value = "task-2195654" [ 1610.362396] env[62627]: _type = "Task" [ 1610.362396] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1610.369726] env[62627]: DEBUG oslo_vmware.api [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195654, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1610.395288] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Releasing lock "refresh_cache-ef9f511a-3b2c-4871-b52a-4959e7d3390b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1610.395554] env[62627]: DEBUG nova.compute.manager [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Instance network_info: |[{"id": "82de6dd8-1d57-45fc-9263-40c89eeacaa3", "address": "fa:16:3e:56:28:96", "network": {"id": "da92dc57-06a7-46e5-a51b-1f0a8d7373a7", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-952102388-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4b88e3a0d8ee4e3682bd312872dcbc0f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4a8f5363-be3a-4f92-9ccf-33bb0c8113b3", "external-id": "nsx-vlan-transportzone-944", "segmentation_id": 944, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap82de6dd8-1d", "ovs_interfaceid": "82de6dd8-1d57-45fc-9263-40c89eeacaa3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62627) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1610.395908] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:56:28:96', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4a8f5363-be3a-4f92-9ccf-33bb0c8113b3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '82de6dd8-1d57-45fc-9263-40c89eeacaa3', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1610.403272] env[62627]: DEBUG oslo.service.loopingcall [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1610.403455] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1610.403647] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b99a49da-8e8e-4e40-b39e-a68797487cce {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.422046] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1610.422046] env[62627]: value = "task-2195655" [ 1610.422046] env[62627]: _type = "Task" [ 1610.422046] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1610.428599] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195655, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1610.685693] env[62627]: DEBUG nova.compute.manager [req-ef620025-87e9-4cde-8c5b-2a613a89ecf8 req-db9e3bbe-9ea3-4b1b-8ce2-45e35ffbcd31 service nova] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Received event network-changed-82de6dd8-1d57-45fc-9263-40c89eeacaa3 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1610.685953] env[62627]: DEBUG nova.compute.manager [req-ef620025-87e9-4cde-8c5b-2a613a89ecf8 req-db9e3bbe-9ea3-4b1b-8ce2-45e35ffbcd31 service nova] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Refreshing instance network info cache due to event network-changed-82de6dd8-1d57-45fc-9263-40c89eeacaa3. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1610.685953] env[62627]: DEBUG oslo_concurrency.lockutils [req-ef620025-87e9-4cde-8c5b-2a613a89ecf8 req-db9e3bbe-9ea3-4b1b-8ce2-45e35ffbcd31 service nova] Acquiring lock "refresh_cache-ef9f511a-3b2c-4871-b52a-4959e7d3390b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1610.686223] env[62627]: DEBUG oslo_concurrency.lockutils [req-ef620025-87e9-4cde-8c5b-2a613a89ecf8 req-db9e3bbe-9ea3-4b1b-8ce2-45e35ffbcd31 service nova] Acquired lock "refresh_cache-ef9f511a-3b2c-4871-b52a-4959e7d3390b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1610.686551] env[62627]: DEBUG nova.network.neutron [req-ef620025-87e9-4cde-8c5b-2a613a89ecf8 req-db9e3bbe-9ea3-4b1b-8ce2-45e35ffbcd31 service nova] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Refreshing network info cache for port 82de6dd8-1d57-45fc-9263-40c89eeacaa3 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1610.872176] env[62627]: DEBUG oslo_vmware.api [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195654, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.133345} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1610.872467] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1610.872669] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1610.872848] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1610.893107] env[62627]: INFO nova.scheduler.client.report [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Deleted allocations for instance 98c33a76-60af-4449-b06b-58c7b0ed7cc9 [ 1610.934011] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195655, 'name': CreateVM_Task, 'duration_secs': 0.288342} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1610.934158] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1610.934726] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1610.934883] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1610.935215] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1610.935454] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-54a83b38-42de-4ac7-aeec-9b0b486d3057 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.939884] env[62627]: DEBUG oslo_vmware.api [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 
tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Waiting for the task: (returnval){ [ 1610.939884] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52243ef6-34b3-d401-3ea9-4e6be9c9b6c4" [ 1610.939884] env[62627]: _type = "Task" [ 1610.939884] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1611.609938] env[62627]: DEBUG oslo_concurrency.lockutils [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1611.610176] env[62627]: DEBUG oslo_concurrency.lockutils [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1611.610354] env[62627]: DEBUG nova.objects.instance [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Lazy-loading 'resources' on Instance uuid 98c33a76-60af-4449-b06b-58c7b0ed7cc9 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1611.612156] env[62627]: DEBUG oslo_vmware.api [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52243ef6-34b3-d401-3ea9-4e6be9c9b6c4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.612343] env[62627]: WARNING oslo_vmware.common.loopingcall [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] task run outlasted interval by 0.17224899999999999 sec [ 1611.621993] env[62627]: DEBUG oslo_vmware.api [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52243ef6-34b3-d401-3ea9-4e6be9c9b6c4, 'name': SearchDatastore_Task, 'duration_secs': 0.00961} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1611.622265] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1611.622475] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Processing image d5497cf1-16e8-478a-a834-e9289b1751b7 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1611.622693] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1611.622853] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1611.623048] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1611.623287] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c57b35cc-9164-4dd1-ad2c-04f26c1221cc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.631415] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1611.631583] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1611.632264] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-765cfe86-bd92-42b8-91ab-29edbd60e224 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.636668] env[62627]: DEBUG oslo_vmware.api [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Waiting for the task: (returnval){ [ 1611.636668] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52f24991-3028-fca6-9710-f8f24bfc321e" [ 1611.636668] env[62627]: _type = "Task" [ 1611.636668] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1611.644994] env[62627]: DEBUG oslo_vmware.api [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52f24991-3028-fca6-9710-f8f24bfc321e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.803087] env[62627]: DEBUG nova.network.neutron [req-ef620025-87e9-4cde-8c5b-2a613a89ecf8 req-db9e3bbe-9ea3-4b1b-8ce2-45e35ffbcd31 service nova] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Updated VIF entry in instance network info cache for port 82de6dd8-1d57-45fc-9263-40c89eeacaa3. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1611.803402] env[62627]: DEBUG nova.network.neutron [req-ef620025-87e9-4cde-8c5b-2a613a89ecf8 req-db9e3bbe-9ea3-4b1b-8ce2-45e35ffbcd31 service nova] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Updating instance_info_cache with network_info: [{"id": "82de6dd8-1d57-45fc-9263-40c89eeacaa3", "address": "fa:16:3e:56:28:96", "network": {"id": "da92dc57-06a7-46e5-a51b-1f0a8d7373a7", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-952102388-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4b88e3a0d8ee4e3682bd312872dcbc0f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4a8f5363-be3a-4f92-9ccf-33bb0c8113b3", "external-id": "nsx-vlan-transportzone-944", "segmentation_id": 944, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap82de6dd8-1d", "ovs_interfaceid": "82de6dd8-1d57-45fc-9263-40c89eeacaa3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1612.113115] env[62627]: DEBUG nova.objects.instance [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Lazy-loading 'numa_topology' on Instance uuid 98c33a76-60af-4449-b06b-58c7b0ed7cc9 {{(pid=62627) 
obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1612.147164] env[62627]: DEBUG oslo_vmware.api [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]52f24991-3028-fca6-9710-f8f24bfc321e, 'name': SearchDatastore_Task, 'duration_secs': 0.008308} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1612.147933] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b84e1d29-dfa1-427f-8ca3-8fd8c6c32cae {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.153218] env[62627]: DEBUG oslo_vmware.api [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Waiting for the task: (returnval){ [ 1612.153218] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5272e57f-c36b-8ac1-be6c-7a1ed556d900" [ 1612.153218] env[62627]: _type = "Task" [ 1612.153218] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1612.160865] env[62627]: DEBUG oslo_vmware.api [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5272e57f-c36b-8ac1-be6c-7a1ed556d900, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1612.306505] env[62627]: DEBUG oslo_concurrency.lockutils [req-ef620025-87e9-4cde-8c5b-2a613a89ecf8 req-db9e3bbe-9ea3-4b1b-8ce2-45e35ffbcd31 service nova] Releasing lock "refresh_cache-ef9f511a-3b2c-4871-b52a-4959e7d3390b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1612.306767] env[62627]: DEBUG nova.compute.manager [req-ef620025-87e9-4cde-8c5b-2a613a89ecf8 req-db9e3bbe-9ea3-4b1b-8ce2-45e35ffbcd31 service nova] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Received event network-vif-unplugged-374cdd5a-3b25-49a6-a1a0-45d9d5a331c1 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1612.306957] env[62627]: DEBUG oslo_concurrency.lockutils [req-ef620025-87e9-4cde-8c5b-2a613a89ecf8 req-db9e3bbe-9ea3-4b1b-8ce2-45e35ffbcd31 service nova] Acquiring lock "98c33a76-60af-4449-b06b-58c7b0ed7cc9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1612.307174] env[62627]: DEBUG oslo_concurrency.lockutils [req-ef620025-87e9-4cde-8c5b-2a613a89ecf8 req-db9e3bbe-9ea3-4b1b-8ce2-45e35ffbcd31 service nova] Lock "98c33a76-60af-4449-b06b-58c7b0ed7cc9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1612.307404] env[62627]: DEBUG oslo_concurrency.lockutils [req-ef620025-87e9-4cde-8c5b-2a613a89ecf8 req-db9e3bbe-9ea3-4b1b-8ce2-45e35ffbcd31 service nova] Lock "98c33a76-60af-4449-b06b-58c7b0ed7cc9-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1612.307591] env[62627]: DEBUG nova.compute.manager [req-ef620025-87e9-4cde-8c5b-2a613a89ecf8 req-db9e3bbe-9ea3-4b1b-8ce2-45e35ffbcd31 service nova] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] No waiting events found dispatching network-vif-unplugged-374cdd5a-3b25-49a6-a1a0-45d9d5a331c1 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1612.307795] env[62627]: WARNING nova.compute.manager [req-ef620025-87e9-4cde-8c5b-2a613a89ecf8 req-db9e3bbe-9ea3-4b1b-8ce2-45e35ffbcd31 service nova] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Received unexpected event network-vif-unplugged-374cdd5a-3b25-49a6-a1a0-45d9d5a331c1 for instance with vm_state shelved and task_state shelving_offloading. [ 1612.307973] env[62627]: DEBUG nova.compute.manager [req-ef620025-87e9-4cde-8c5b-2a613a89ecf8 req-db9e3bbe-9ea3-4b1b-8ce2-45e35ffbcd31 service nova] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Received event network-changed-374cdd5a-3b25-49a6-a1a0-45d9d5a331c1 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1612.308140] env[62627]: DEBUG nova.compute.manager [req-ef620025-87e9-4cde-8c5b-2a613a89ecf8 req-db9e3bbe-9ea3-4b1b-8ce2-45e35ffbcd31 service nova] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Refreshing instance network info cache due to event network-changed-374cdd5a-3b25-49a6-a1a0-45d9d5a331c1. {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1612.308334] env[62627]: DEBUG oslo_concurrency.lockutils [req-ef620025-87e9-4cde-8c5b-2a613a89ecf8 req-db9e3bbe-9ea3-4b1b-8ce2-45e35ffbcd31 service nova] Acquiring lock "refresh_cache-98c33a76-60af-4449-b06b-58c7b0ed7cc9" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1612.308461] env[62627]: DEBUG oslo_concurrency.lockutils [req-ef620025-87e9-4cde-8c5b-2a613a89ecf8 req-db9e3bbe-9ea3-4b1b-8ce2-45e35ffbcd31 service nova] Acquired lock "refresh_cache-98c33a76-60af-4449-b06b-58c7b0ed7cc9" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1612.308611] env[62627]: DEBUG nova.network.neutron [req-ef620025-87e9-4cde-8c5b-2a613a89ecf8 req-db9e3bbe-9ea3-4b1b-8ce2-45e35ffbcd31 service nova] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Refreshing network info cache for port 374cdd5a-3b25-49a6-a1a0-45d9d5a331c1 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1612.615260] env[62627]: DEBUG nova.objects.base [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Object Instance<98c33a76-60af-4449-b06b-58c7b0ed7cc9> lazy-loaded attributes: resources,numa_topology {{(pid=62627) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1612.643652] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b49a03d9-459f-4dda-b24e-39e4934f583f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.651994] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a751bd85-bb88-42cd-aafe-3ad36b61a5cb {{(pid=62627) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.691827] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7678b58-e883-4b1c-8dbb-d696d88573c0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.694407] env[62627]: DEBUG oslo_vmware.api [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5272e57f-c36b-8ac1-be6c-7a1ed556d900, 'name': SearchDatastore_Task, 'duration_secs': 0.010076} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1612.694662] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1612.694911] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] ef9f511a-3b2c-4871-b52a-4959e7d3390b/ef9f511a-3b2c-4871-b52a-4959e7d3390b.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1612.695547] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b5c3f974-4f06-44bb-92fd-40ebf55a11b5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.701025] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5552fb02-b992-4491-b0f1-93d84b728b21 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.704788] env[62627]: DEBUG oslo_vmware.api [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Waiting for the task: (returnval){ [ 1612.704788] env[62627]: value = "task-2195656" [ 1612.704788] env[62627]: _type = "Task" [ 1612.704788] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1612.715687] env[62627]: DEBUG nova.compute.provider_tree [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Updating inventory in ProviderTree for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1612.720818] env[62627]: DEBUG oslo_vmware.api [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': task-2195656, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1613.121100] env[62627]: DEBUG nova.network.neutron [req-ef620025-87e9-4cde-8c5b-2a613a89ecf8 req-db9e3bbe-9ea3-4b1b-8ce2-45e35ffbcd31 service nova] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Updated VIF entry in instance network info cache for port 374cdd5a-3b25-49a6-a1a0-45d9d5a331c1. {{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1613.121484] env[62627]: DEBUG nova.network.neutron [req-ef620025-87e9-4cde-8c5b-2a613a89ecf8 req-db9e3bbe-9ea3-4b1b-8ce2-45e35ffbcd31 service nova] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Updating instance_info_cache with network_info: [{"id": "374cdd5a-3b25-49a6-a1a0-45d9d5a331c1", "address": "fa:16:3e:5f:eb:ca", "network": {"id": "eb8841de-7dc6-4afa-ad35-670f3baa1354", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-428698998-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6095198e22f645698a7c566414eba47c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap374cdd5a-3b", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1613.216097] env[62627]: DEBUG oslo_vmware.api [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': task-2195656, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1613.236744] env[62627]: ERROR nova.scheduler.client.report [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [req-299e9a06-c942-4100-9739-0a6e8fe89ec2] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c17e7f32-68d8-4e2f-8979-15d701de02f7. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-299e9a06-c942-4100-9739-0a6e8fe89ec2"}]} [ 1613.252919] env[62627]: DEBUG nova.scheduler.client.report [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Refreshing inventories for resource provider c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1613.265378] env[62627]: DEBUG nova.scheduler.client.report [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Updating ProviderTree inventory for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1613.265610] env[62627]: DEBUG nova.compute.provider_tree [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Updating inventory in ProviderTree for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1613.275872] env[62627]: DEBUG nova.scheduler.client.report [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Refreshing aggregate associations for resource provider c17e7f32-68d8-4e2f-8979-15d701de02f7, aggregates: None {{(pid=62627) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1613.291842] env[62627]: DEBUG nova.scheduler.client.report [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b 
tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Refreshing trait associations for resource provider c17e7f32-68d8-4e2f-8979-15d701de02f7, traits: COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=62627) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1613.323169] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a3956c4-721e-4a4b-ba07-414b7122b6b6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.330604] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc2d78c1-b63f-4e04-b7c1-278194b9acf0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.360199] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a7f198a-67cd-4a8b-a3b2-fdcf780c73ed {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.366869] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49fb5cfa-1fd9-42d5-b6dc-6c858d3f5d4b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.379446] env[62627]: DEBUG nova.compute.provider_tree [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1613.624573] env[62627]: DEBUG oslo_concurrency.lockutils [req-ef620025-87e9-4cde-8c5b-2a613a89ecf8 req-db9e3bbe-9ea3-4b1b-8ce2-45e35ffbcd31 service nova] Releasing lock "refresh_cache-98c33a76-60af-4449-b06b-58c7b0ed7cc9" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1613.714829] env[62627]: DEBUG oslo_vmware.api [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': task-2195656, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.535235} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1613.715097] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d5497cf1-16e8-478a-a834-e9289b1751b7/d5497cf1-16e8-478a-a834-e9289b1751b7.vmdk to [datastore2] ef9f511a-3b2c-4871-b52a-4959e7d3390b/ef9f511a-3b2c-4871-b52a-4959e7d3390b.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1613.715310] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Extending root virtual disk to 1048576 {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1613.715554] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-27968bd7-568f-4b3a-8a8d-98a510212f61 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.722561] env[62627]: DEBUG oslo_vmware.api [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Waiting for the task: (returnval){ [ 1613.722561] env[62627]: value = "task-2195657" [ 1613.722561] env[62627]: _type = "Task" [ 1613.722561] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1613.729522] env[62627]: DEBUG oslo_vmware.api [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': task-2195657, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1613.882364] env[62627]: DEBUG nova.scheduler.client.report [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1614.032836] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Acquiring lock "98c33a76-60af-4449-b06b-58c7b0ed7cc9" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1614.232524] env[62627]: DEBUG oslo_vmware.api [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': task-2195657, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068555} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1614.232918] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Extended root virtual disk {{(pid=62627) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1614.233544] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-834b6b06-7338-44e3-84fd-595652ff97d7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.254541] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Reconfiguring VM instance instance-0000007d to attach disk [datastore2] ef9f511a-3b2c-4871-b52a-4959e7d3390b/ef9f511a-3b2c-4871-b52a-4959e7d3390b.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1614.254761] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-64c6d988-38c7-41c9-8289-69c9034eb4fe {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.272738] env[62627]: DEBUG oslo_vmware.api [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Waiting for the task: (returnval){ [ 1614.272738] env[62627]: value = "task-2195658" [ 1614.272738] env[62627]: _type = "Task" [ 1614.272738] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1614.279723] env[62627]: DEBUG oslo_vmware.api [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': task-2195658, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1614.387819] env[62627]: DEBUG oslo_concurrency.lockutils [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.778s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1614.784157] env[62627]: DEBUG oslo_vmware.api [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': task-2195658, 'name': ReconfigVM_Task, 'duration_secs': 0.268775} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1614.784421] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Reconfigured VM instance instance-0000007d to attach disk [datastore2] ef9f511a-3b2c-4871-b52a-4959e7d3390b/ef9f511a-3b2c-4871-b52a-4959e7d3390b.vmdk or device None with type sparse {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1614.785083] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-896e5359-9e6e-4baf-be35-1b3611e269d8 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.791942] env[62627]: DEBUG oslo_vmware.api [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Waiting for the task: (returnval){ [ 1614.791942] env[62627]: value = "task-2195659" [ 1614.791942] env[62627]: _type = "Task" [ 1614.791942] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1614.799324] env[62627]: DEBUG oslo_vmware.api [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': task-2195659, 'name': Rename_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1614.896167] env[62627]: DEBUG oslo_concurrency.lockutils [None req-83d2d3fb-e357-43df-8d55-efa78c6c3c2b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Lock "98c33a76-60af-4449-b06b-58c7b0ed7cc9" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 22.710s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1614.896955] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Lock "98c33a76-60af-4449-b06b-58c7b0ed7cc9" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 0.864s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1614.897154] env[62627]: INFO nova.compute.manager [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Unshelving [ 1615.301451] env[62627]: DEBUG oslo_vmware.api [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': task-2195659, 'name': Rename_Task, 'duration_secs': 0.13269} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1615.301912] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1615.301912] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b32dd7a6-1dd1-4b3e-b576-6b2b8048de06 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.306960] env[62627]: DEBUG oslo_vmware.api [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Waiting for the task: (returnval){ [ 1615.306960] env[62627]: value = "task-2195660" [ 1615.306960] env[62627]: _type = "Task" [ 1615.306960] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1615.313713] env[62627]: DEBUG oslo_vmware.api [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': task-2195660, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1615.816287] env[62627]: DEBUG oslo_vmware.api [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': task-2195660, 'name': PowerOnVM_Task, 'duration_secs': 0.462266} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1615.816568] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1615.816764] env[62627]: INFO nova.compute.manager [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Took 7.36 seconds to spawn the instance on the hypervisor. [ 1615.816937] env[62627]: DEBUG nova.compute.manager [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1615.817700] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb7fba26-815e-4f2a-97f2-eed4470547b5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.921635] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1615.921935] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1615.922168] env[62627]: DEBUG nova.objects.instance [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Lazy-loading 'pci_requests' on Instance uuid 98c33a76-60af-4449-b06b-58c7b0ed7cc9 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1616.333523] env[62627]: INFO nova.compute.manager [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Took 12.04 seconds to build instance. 
[ 1616.425953] env[62627]: DEBUG nova.objects.instance [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Lazy-loading 'numa_topology' on Instance uuid 98c33a76-60af-4449-b06b-58c7b0ed7cc9 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1616.835632] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f95707a7-f17c-4f52-98af-7b0827d54bb8 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Lock "ef9f511a-3b2c-4871-b52a-4959e7d3390b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.557s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1616.928439] env[62627]: INFO nova.compute.claims [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1617.715073] env[62627]: DEBUG nova.compute.manager [req-0ab8d388-ca4e-404b-b501-a801ff4682fe req-ad20d0dc-05ca-4f5e-ae99-39f65db2fcd9 service nova] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Received event network-changed-82de6dd8-1d57-45fc-9263-40c89eeacaa3 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1617.715073] env[62627]: DEBUG nova.compute.manager [req-0ab8d388-ca4e-404b-b501-a801ff4682fe req-ad20d0dc-05ca-4f5e-ae99-39f65db2fcd9 service nova] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Refreshing instance network info cache due to event network-changed-82de6dd8-1d57-45fc-9263-40c89eeacaa3. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1617.715073] env[62627]: DEBUG oslo_concurrency.lockutils [req-0ab8d388-ca4e-404b-b501-a801ff4682fe req-ad20d0dc-05ca-4f5e-ae99-39f65db2fcd9 service nova] Acquiring lock "refresh_cache-ef9f511a-3b2c-4871-b52a-4959e7d3390b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1617.715073] env[62627]: DEBUG oslo_concurrency.lockutils [req-0ab8d388-ca4e-404b-b501-a801ff4682fe req-ad20d0dc-05ca-4f5e-ae99-39f65db2fcd9 service nova] Acquired lock "refresh_cache-ef9f511a-3b2c-4871-b52a-4959e7d3390b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1617.715073] env[62627]: DEBUG nova.network.neutron [req-0ab8d388-ca4e-404b-b501-a801ff4682fe req-ad20d0dc-05ca-4f5e-ae99-39f65db2fcd9 service nova] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Refreshing network info cache for port 82de6dd8-1d57-45fc-9263-40c89eeacaa3 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1617.972451] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9df7ce97-07b0-4239-a241-8d0346a9f69c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.980421] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fd801ca-5474-4215-8262-e637a07295f4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.009471] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-090e6708-7562-4168-989f-ccad46ab461a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.016295] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f6c5ee2-76e8-437b-b76f-fb79e2849359 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.028642] env[62627]: DEBUG nova.compute.provider_tree [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Updating inventory in ProviderTree for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1618.473930] env[62627]: DEBUG nova.network.neutron [req-0ab8d388-ca4e-404b-b501-a801ff4682fe req-ad20d0dc-05ca-4f5e-ae99-39f65db2fcd9 service nova] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Updated VIF entry in instance network info cache for port 82de6dd8-1d57-45fc-9263-40c89eeacaa3. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1618.474327] env[62627]: DEBUG nova.network.neutron [req-0ab8d388-ca4e-404b-b501-a801ff4682fe req-ad20d0dc-05ca-4f5e-ae99-39f65db2fcd9 service nova] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Updating instance_info_cache with network_info: [{"id": "82de6dd8-1d57-45fc-9263-40c89eeacaa3", "address": "fa:16:3e:56:28:96", "network": {"id": "da92dc57-06a7-46e5-a51b-1f0a8d7373a7", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-952102388-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.218", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4b88e3a0d8ee4e3682bd312872dcbc0f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4a8f5363-be3a-4f92-9ccf-33bb0c8113b3", "external-id": "nsx-vlan-transportzone-944", "segmentation_id": 944, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap82de6dd8-1d", "ovs_interfaceid": "82de6dd8-1d57-45fc-9263-40c89eeacaa3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1618.561833] env[62627]: DEBUG nova.scheduler.client.report [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Updated inventory for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with generation 183 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1618.562288] env[62627]: DEBUG nova.compute.provider_tree [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Updating resource provider c17e7f32-68d8-4e2f-8979-15d701de02f7 generation from 183 to 184 during operation: update_inventory {{(pid=62627) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1618.562499] env[62627]: DEBUG nova.compute.provider_tree [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Updating inventory in ProviderTree for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 
1, 'allocation_ratio': 1.0}} {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1618.977707] env[62627]: DEBUG oslo_concurrency.lockutils [req-0ab8d388-ca4e-404b-b501-a801ff4682fe req-ad20d0dc-05ca-4f5e-ae99-39f65db2fcd9 service nova] Releasing lock "refresh_cache-ef9f511a-3b2c-4871-b52a-4959e7d3390b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1619.067983] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.146s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1619.096252] env[62627]: INFO nova.network.neutron [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Updating port 374cdd5a-3b25-49a6-a1a0-45d9d5a331c1 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1620.464181] env[62627]: DEBUG nova.compute.manager [req-9f85bf2b-6e6f-4af5-a585-41df5986011f req-77a184a5-b3a2-4b44-aec0-b9d3372f2c05 service nova] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Received event network-vif-plugged-374cdd5a-3b25-49a6-a1a0-45d9d5a331c1 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1620.464181] env[62627]: DEBUG oslo_concurrency.lockutils [req-9f85bf2b-6e6f-4af5-a585-41df5986011f req-77a184a5-b3a2-4b44-aec0-b9d3372f2c05 service nova] Acquiring lock "98c33a76-60af-4449-b06b-58c7b0ed7cc9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1620.464181] env[62627]: DEBUG oslo_concurrency.lockutils [req-9f85bf2b-6e6f-4af5-a585-41df5986011f req-77a184a5-b3a2-4b44-aec0-b9d3372f2c05 service nova] Lock "98c33a76-60af-4449-b06b-58c7b0ed7cc9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1620.464181] env[62627]: DEBUG oslo_concurrency.lockutils [req-9f85bf2b-6e6f-4af5-a585-41df5986011f req-77a184a5-b3a2-4b44-aec0-b9d3372f2c05 service nova] Lock "98c33a76-60af-4449-b06b-58c7b0ed7cc9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1620.464181] env[62627]: DEBUG nova.compute.manager [req-9f85bf2b-6e6f-4af5-a585-41df5986011f req-77a184a5-b3a2-4b44-aec0-b9d3372f2c05 service nova] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] No waiting events found dispatching network-vif-plugged-374cdd5a-3b25-49a6-a1a0-45d9d5a331c1 {{(pid=62627) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1620.464984] env[62627]: WARNING nova.compute.manager [req-9f85bf2b-6e6f-4af5-a585-41df5986011f req-77a184a5-b3a2-4b44-aec0-b9d3372f2c05 service nova] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Received unexpected event network-vif-plugged-374cdd5a-3b25-49a6-a1a0-45d9d5a331c1 for instance with vm_state shelved_offloaded and 
task_state spawning. [ 1620.546912] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Acquiring lock "refresh_cache-98c33a76-60af-4449-b06b-58c7b0ed7cc9" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1620.546912] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Acquired lock "refresh_cache-98c33a76-60af-4449-b06b-58c7b0ed7cc9" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1620.546912] env[62627]: DEBUG nova.network.neutron [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Building network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1621.244817] env[62627]: DEBUG nova.network.neutron [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Updating instance_info_cache with network_info: [{"id": "374cdd5a-3b25-49a6-a1a0-45d9d5a331c1", "address": "fa:16:3e:5f:eb:ca", "network": {"id": "eb8841de-7dc6-4afa-ad35-670f3baa1354", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-428698998-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6095198e22f645698a7c566414eba47c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cd38fdec-d092-4a84-ab41-685f6dbb4f29", "external-id": "nsx-vlan-transportzone-622", "segmentation_id": 622, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap374cdd5a-3b", "ovs_interfaceid": "374cdd5a-3b25-49a6-a1a0-45d9d5a331c1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1621.747222] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Releasing lock "refresh_cache-98c33a76-60af-4449-b06b-58c7b0ed7cc9" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1621.773595] env[62627]: DEBUG nova.virt.hardware [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2025-01-30T20:28:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='5693238734dadac73dfa8d825e919cdb',container_format='bare',created_at=2025-01-30T20:45:21Z,direct_url=,disk_format='vmdk',id=d6d73dc4-0e36-445e-9ab0-e1d798cfb7e2,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-598758281-shelved',owner='6095198e22f645698a7c566414eba47c',properties=ImageMetaProps,protected=,size=31669248,status='active',tags=,updated_at=2025-01-30T20:45:36Z,virtual_size=,visibility=), allow threads: False {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1621.773824] env[62627]: DEBUG nova.virt.hardware [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Flavor limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1621.773980] env[62627]: DEBUG nova.virt.hardware [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Image limits 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1621.774179] env[62627]: DEBUG nova.virt.hardware [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Flavor pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1621.774356] env[62627]: DEBUG nova.virt.hardware [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Image pref 0:0:0 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1621.774506] env[62627]: DEBUG nova.virt.hardware [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62627) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1621.774714] env[62627]: DEBUG nova.virt.hardware [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1621.774874] env[62627]: DEBUG nova.virt.hardware [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62627) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1621.775050] env[62627]: DEBUG nova.virt.hardware [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Got 1 possible topologies {{(pid=62627) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:505}} [ 1621.775216] env[62627]: DEBUG nova.virt.hardware [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1621.775386] env[62627]: DEBUG nova.virt.hardware [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62627) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1621.776230] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-978a90b0-6388-463c-be49-d991761358ae {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.784146] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e8f1c20-8848-4d6f-81ac-b8a6a6623e4a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.797157] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5f:eb:ca', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cd38fdec-d092-4a84-ab41-685f6dbb4f29', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '374cdd5a-3b25-49a6-a1a0-45d9d5a331c1', 'vif_model': 'vmxnet3'}] {{(pid=62627) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1621.804419] env[62627]: DEBUG oslo.service.loopingcall [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1621.804633] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Creating VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1621.804823] env[62627]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8b6eae6d-a7aa-4247-8eeb-5756f9523ae3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.824635] env[62627]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1621.824635] env[62627]: value = "task-2195661" [ 1621.824635] env[62627]: _type = "Task" [ 1621.824635] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1621.831661] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195661, 'name': CreateVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1622.334104] env[62627]: DEBUG oslo_vmware.api [-] Task: {'id': task-2195661, 'name': CreateVM_Task, 'duration_secs': 0.263431} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1622.334264] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Created VM on the ESX host {{(pid=62627) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1622.334936] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d6d73dc4-0e36-445e-9ab0-e1d798cfb7e2" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1622.335168] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d6d73dc4-0e36-445e-9ab0-e1d798cfb7e2" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1622.335552] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d6d73dc4-0e36-445e-9ab0-e1d798cfb7e2" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1622.335827] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d2b99918-0177-4ea8-8ebd-1ac4196b83f5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.339894] env[62627]: DEBUG oslo_vmware.api [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Waiting for the task: (returnval){ [ 1622.339894] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5276c87c-5d86-184b-87af-f4b86a5e0558" [ 1622.339894] env[62627]: _type = "Task" [ 1622.339894] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1622.347048] env[62627]: DEBUG oslo_vmware.api [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5276c87c-5d86-184b-87af-f4b86a5e0558, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1622.487679] env[62627]: DEBUG nova.compute.manager [req-b0ae399a-6726-4c30-bb76-a772a41c1382 req-49367332-66ef-4528-8af1-f404cc798185 service nova] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Received event network-changed-374cdd5a-3b25-49a6-a1a0-45d9d5a331c1 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1622.487870] env[62627]: DEBUG nova.compute.manager [req-b0ae399a-6726-4c30-bb76-a772a41c1382 req-49367332-66ef-4528-8af1-f404cc798185 service nova] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Refreshing instance network info cache due to event network-changed-374cdd5a-3b25-49a6-a1a0-45d9d5a331c1. 
{{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1622.488096] env[62627]: DEBUG oslo_concurrency.lockutils [req-b0ae399a-6726-4c30-bb76-a772a41c1382 req-49367332-66ef-4528-8af1-f404cc798185 service nova] Acquiring lock "refresh_cache-98c33a76-60af-4449-b06b-58c7b0ed7cc9" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1622.488244] env[62627]: DEBUG oslo_concurrency.lockutils [req-b0ae399a-6726-4c30-bb76-a772a41c1382 req-49367332-66ef-4528-8af1-f404cc798185 service nova] Acquired lock "refresh_cache-98c33a76-60af-4449-b06b-58c7b0ed7cc9" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1622.488406] env[62627]: DEBUG nova.network.neutron [req-b0ae399a-6726-4c30-bb76-a772a41c1382 req-49367332-66ef-4528-8af1-f404cc798185 service nova] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Refreshing network info cache for port 374cdd5a-3b25-49a6-a1a0-45d9d5a331c1 {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1622.850401] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d6d73dc4-0e36-445e-9ab0-e1d798cfb7e2" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1622.850769] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Processing image d6d73dc4-0e36-445e-9ab0-e1d798cfb7e2 {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1622.850888] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d6d73dc4-0e36-445e-9ab0-e1d798cfb7e2/d6d73dc4-0e36-445e-9ab0-e1d798cfb7e2.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1622.851032] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d6d73dc4-0e36-445e-9ab0-e1d798cfb7e2/d6d73dc4-0e36-445e-9ab0-e1d798cfb7e2.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1622.851254] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1622.851521] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ff70f53c-b30f-4752-b87a-22938eaa1350 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.868284] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1622.868519] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62627) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1622.869208] env[62627]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6aa263c-9961-47c0-a557-ada2ee3c43b6 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.874772] env[62627]: DEBUG oslo_vmware.api [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Waiting for the task: (returnval){ [ 1622.874772] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]5203de5f-86a5-1c30-8d78-c5b450762baa" [ 1622.874772] env[62627]: _type = "Task" [ 1622.874772] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1622.881765] env[62627]: DEBUG oslo_vmware.api [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': session[520bf719-dc86-5bda-f3a2-48a743b520b7]5203de5f-86a5-1c30-8d78-c5b450762baa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1623.187595] env[62627]: DEBUG nova.network.neutron [req-b0ae399a-6726-4c30-bb76-a772a41c1382 req-49367332-66ef-4528-8af1-f404cc798185 service nova] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Updated VIF entry in instance network info cache for port 374cdd5a-3b25-49a6-a1a0-45d9d5a331c1. 
{{(pid=62627) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1623.187972] env[62627]: DEBUG nova.network.neutron [req-b0ae399a-6726-4c30-bb76-a772a41c1382 req-49367332-66ef-4528-8af1-f404cc798185 service nova] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Updating instance_info_cache with network_info: [{"id": "374cdd5a-3b25-49a6-a1a0-45d9d5a331c1", "address": "fa:16:3e:5f:eb:ca", "network": {"id": "eb8841de-7dc6-4afa-ad35-670f3baa1354", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-428698998-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6095198e22f645698a7c566414eba47c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cd38fdec-d092-4a84-ab41-685f6dbb4f29", "external-id": "nsx-vlan-transportzone-622", "segmentation_id": 622, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap374cdd5a-3b", "ovs_interfaceid": "374cdd5a-3b25-49a6-a1a0-45d9d5a331c1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1623.384410] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Preparing fetch location {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1623.384675] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Fetch image to [datastore2] OSTACK_IMG_e698a981-c347-4f37-bc2f-c03f286c868a/OSTACK_IMG_e698a981-c347-4f37-bc2f-c03f286c868a.vmdk {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1623.384828] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Downloading stream optimized image d6d73dc4-0e36-445e-9ab0-e1d798cfb7e2 to [datastore2] OSTACK_IMG_e698a981-c347-4f37-bc2f-c03f286c868a/OSTACK_IMG_e698a981-c347-4f37-bc2f-c03f286c868a.vmdk on the data store datastore2 as vApp {{(pid=62627) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1623.384993] env[62627]: DEBUG nova.virt.vmwareapi.images [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Downloading image file data d6d73dc4-0e36-445e-9ab0-e1d798cfb7e2 to the ESX as VM named 'OSTACK_IMG_e698a981-c347-4f37-bc2f-c03f286c868a' {{(pid=62627) 
fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1623.454671] env[62627]: DEBUG oslo_vmware.rw_handles [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1623.454671] env[62627]: value = "resgroup-9" [ 1623.454671] env[62627]: _type = "ResourcePool" [ 1623.454671] env[62627]: }. {{(pid=62627) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1623.454901] env[62627]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-20af8125-ea61-4c9e-b509-26e089fcf1ed {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.476352] env[62627]: DEBUG oslo_vmware.rw_handles [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Lease: (returnval){ [ 1623.476352] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52b954c2-4c8c-dadc-a5b4-48ca1b40c25e" [ 1623.476352] env[62627]: _type = "HttpNfcLease" [ 1623.476352] env[62627]: } obtained for vApp import into resource pool (val){ [ 1623.476352] env[62627]: value = "resgroup-9" [ 1623.476352] env[62627]: _type = "ResourcePool" [ 1623.476352] env[62627]: }. {{(pid=62627) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1623.476745] env[62627]: DEBUG oslo_vmware.api [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Waiting for the lease: (returnval){ [ 1623.476745] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52b954c2-4c8c-dadc-a5b4-48ca1b40c25e" [ 1623.476745] env[62627]: _type = "HttpNfcLease" [ 1623.476745] env[62627]: } to be ready. {{(pid=62627) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1623.482805] env[62627]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1623.482805] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52b954c2-4c8c-dadc-a5b4-48ca1b40c25e" [ 1623.482805] env[62627]: _type = "HttpNfcLease" [ 1623.482805] env[62627]: } is initializing. {{(pid=62627) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1623.691373] env[62627]: DEBUG oslo_concurrency.lockutils [req-b0ae399a-6726-4c30-bb76-a772a41c1382 req-49367332-66ef-4528-8af1-f404cc798185 service nova] Releasing lock "refresh_cache-98c33a76-60af-4449-b06b-58c7b0ed7cc9" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1623.984552] env[62627]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1623.984552] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52b954c2-4c8c-dadc-a5b4-48ca1b40c25e" [ 1623.984552] env[62627]: _type = "HttpNfcLease" [ 1623.984552] env[62627]: } is initializing. {{(pid=62627) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1624.485710] env[62627]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1624.485710] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52b954c2-4c8c-dadc-a5b4-48ca1b40c25e" [ 1624.485710] env[62627]: _type = "HttpNfcLease" [ 1624.485710] env[62627]: } is ready. 
{{(pid=62627) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1624.486094] env[62627]: DEBUG oslo_vmware.rw_handles [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1624.486094] env[62627]: value = "session[520bf719-dc86-5bda-f3a2-48a743b520b7]52b954c2-4c8c-dadc-a5b4-48ca1b40c25e" [ 1624.486094] env[62627]: _type = "HttpNfcLease" [ 1624.486094] env[62627]: }. {{(pid=62627) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1624.486691] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a35f6cc-76cd-4dfb-b499-ba7375961c99 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.493231] env[62627]: DEBUG oslo_vmware.rw_handles [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ab45a8-0600-1d79-a57d-1124fc6cd62f/disk-0.vmdk from lease info. {{(pid=62627) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1624.493397] env[62627]: DEBUG oslo_vmware.rw_handles [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Creating HTTP connection to write to file with size = 31669248 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ab45a8-0600-1d79-a57d-1124fc6cd62f/disk-0.vmdk. {{(pid=62627) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1624.555056] env[62627]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-07083195-95b7-49e8-af7c-62a9a781267a {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.597833] env[62627]: DEBUG oslo_vmware.rw_handles [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Completed reading data from the image iterator. {{(pid=62627) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1625.598316] env[62627]: DEBUG oslo_vmware.rw_handles [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ab45a8-0600-1d79-a57d-1124fc6cd62f/disk-0.vmdk. 
{{(pid=62627) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1625.599022] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bb6fc9a-41a3-46bc-85b4-d16f47430aa3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.606063] env[62627]: DEBUG oslo_vmware.rw_handles [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ab45a8-0600-1d79-a57d-1124fc6cd62f/disk-0.vmdk is in state: ready. {{(pid=62627) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1625.606263] env[62627]: DEBUG oslo_vmware.rw_handles [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ab45a8-0600-1d79-a57d-1124fc6cd62f/disk-0.vmdk. {{(pid=62627) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1625.606497] env[62627]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-a911bf82-3f3a-4c7a-af4b-49596a580c72 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.801047] env[62627]: DEBUG oslo_vmware.rw_handles [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ab45a8-0600-1d79-a57d-1124fc6cd62f/disk-0.vmdk. 
{{(pid=62627) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1625.801232] env[62627]: INFO nova.virt.vmwareapi.images [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Downloaded image file data d6d73dc4-0e36-445e-9ab0-e1d798cfb7e2 [ 1625.802078] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbf4014b-b336-4666-8e90-edf3720a83cd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.817748] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-22ed6c11-2ca4-4c41-801d-18c998c1c0cf {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.901063] env[62627]: INFO nova.virt.vmwareapi.images [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] The imported VM was unregistered [ 1625.903336] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Caching image {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1625.903565] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Creating directory with path [datastore2] devstack-image-cache_base/d6d73dc4-0e36-445e-9ab0-e1d798cfb7e2 {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1625.903840] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9f0fa263-e372-4977-8045-9a60d3355d23 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.913381] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Created directory with path [datastore2] devstack-image-cache_base/d6d73dc4-0e36-445e-9ab0-e1d798cfb7e2 {{(pid=62627) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1625.913556] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_e698a981-c347-4f37-bc2f-c03f286c868a/OSTACK_IMG_e698a981-c347-4f37-bc2f-c03f286c868a.vmdk to [datastore2] devstack-image-cache_base/d6d73dc4-0e36-445e-9ab0-e1d798cfb7e2/d6d73dc4-0e36-445e-9ab0-e1d798cfb7e2.vmdk. 
{{(pid=62627) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1625.913776] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-0947912d-0816-49ab-a27e-88bfc5faac46 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.919944] env[62627]: DEBUG oslo_vmware.api [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Waiting for the task: (returnval){ [ 1625.919944] env[62627]: value = "task-2195664" [ 1625.919944] env[62627]: _type = "Task" [ 1625.919944] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1625.928401] env[62627]: DEBUG oslo_vmware.api [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195664, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.431105] env[62627]: DEBUG oslo_vmware.api [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195664, 'name': MoveVirtualDisk_Task} progress is 24%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.931824] env[62627]: DEBUG oslo_vmware.api [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195664, 'name': MoveVirtualDisk_Task} progress is 49%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.432880] env[62627]: DEBUG oslo_vmware.api [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195664, 'name': MoveVirtualDisk_Task} progress is 74%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.933210] env[62627]: DEBUG oslo_vmware.api [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195664, 'name': MoveVirtualDisk_Task} progress is 97%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.433274] env[62627]: DEBUG oslo_vmware.api [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195664, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.117777} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1628.433535] env[62627]: INFO nova.virt.vmwareapi.ds_util [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_e698a981-c347-4f37-bc2f-c03f286c868a/OSTACK_IMG_e698a981-c347-4f37-bc2f-c03f286c868a.vmdk to [datastore2] devstack-image-cache_base/d6d73dc4-0e36-445e-9ab0-e1d798cfb7e2/d6d73dc4-0e36-445e-9ab0-e1d798cfb7e2.vmdk. [ 1628.433718] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Cleaning up location [datastore2] OSTACK_IMG_e698a981-c347-4f37-bc2f-c03f286c868a {{(pid=62627) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1628.433879] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_e698a981-c347-4f37-bc2f-c03f286c868a {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1628.434141] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bea68bf5-38aa-4491-b6e2-c5e359e4b3b3 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.440044] env[62627]: DEBUG oslo_vmware.api [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Waiting for the task: (returnval){ [ 1628.440044] env[62627]: value = "task-2195665" [ 1628.440044] env[62627]: _type = "Task" [ 1628.440044] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1628.448533] env[62627]: DEBUG oslo_vmware.api [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195665, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.949981] env[62627]: DEBUG oslo_vmware.api [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195665, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.0325} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1628.950393] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1628.950432] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d6d73dc4-0e36-445e-9ab0-e1d798cfb7e2/d6d73dc4-0e36-445e-9ab0-e1d798cfb7e2.vmdk" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1628.950676] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d6d73dc4-0e36-445e-9ab0-e1d798cfb7e2/d6d73dc4-0e36-445e-9ab0-e1d798cfb7e2.vmdk to [datastore2] 98c33a76-60af-4449-b06b-58c7b0ed7cc9/98c33a76-60af-4449-b06b-58c7b0ed7cc9.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1628.950910] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-94dc09d3-5b56-42b2-843d-d63303b6b4fa {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.957997] env[62627]: DEBUG oslo_vmware.api [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Waiting for the task: (returnval){ [ 1628.957997] env[62627]: value = "task-2195666" [ 1628.957997] env[62627]: _type = "Task" [ 1628.957997] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1628.965055] env[62627]: DEBUG oslo_vmware.api [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195666, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1629.467711] env[62627]: DEBUG oslo_vmware.api [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195666, 'name': CopyVirtualDisk_Task} progress is 9%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1629.890924] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1629.891195] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1629.891351] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Starting heal instance info cache {{(pid=62627) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 1629.968983] env[62627]: DEBUG oslo_vmware.api [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195666, 'name': CopyVirtualDisk_Task} progress is 29%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1630.470214] env[62627]: DEBUG oslo_vmware.api [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195666, 'name': CopyVirtualDisk_Task} progress is 52%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1630.897681] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Didn't find any instances for network info cache update. 
{{(pid=62627) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10531}} [ 1630.898041] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1630.898179] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1630.898399] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1630.898621] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1630.898815] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1630.899012] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1630.899192] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62627) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 1630.899384] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1630.970199] env[62627]: DEBUG oslo_vmware.api [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195666, 'name': CopyVirtualDisk_Task} progress is 71%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1631.402830] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1631.403146] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1631.403324] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1631.403482] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62627) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1631.404467] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59026312-8f54-414f-88cc-2ce08c5b58fb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.413445] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-775e16ca-5542-48c8-9a0e-b7e8252f4bb7 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.428031] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fa28a41-65bd-4eb1-ad1c-aa6a74f19565 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.435720] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12784c7e-0e40-49ff-a6e8-f93950449f76 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.466724] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180902MB free_disk=177GB free_vcpus=48 pci_devices=None {{(pid=62627) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1631.466940] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1631.467100] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62627) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1631.477275] env[62627]: DEBUG oslo_vmware.api [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195666, 'name': CopyVirtualDisk_Task} progress is 97%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1631.977788] env[62627]: DEBUG oslo_vmware.api [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195666, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.572918} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1631.978266] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d6d73dc4-0e36-445e-9ab0-e1d798cfb7e2/d6d73dc4-0e36-445e-9ab0-e1d798cfb7e2.vmdk to [datastore2] 98c33a76-60af-4449-b06b-58c7b0ed7cc9/98c33a76-60af-4449-b06b-58c7b0ed7cc9.vmdk {{(pid=62627) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1631.979020] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b852054c-6bd3-43d6-929c-263932538730 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.002101] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Reconfiguring VM instance instance-0000007c to attach disk [datastore2] 98c33a76-60af-4449-b06b-58c7b0ed7cc9/98c33a76-60af-4449-b06b-58c7b0ed7cc9.vmdk or device None with type streamOptimized {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1632.002828] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c0e2c722-8034-447e-986f-f8c7af27c337 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.022695] env[62627]: DEBUG oslo_vmware.api [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Waiting for the task: (returnval){ [ 1632.022695] env[62627]: value = "task-2195667" [ 1632.022695] env[62627]: _type = "Task" [ 1632.022695] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1632.030548] env[62627]: DEBUG oslo_vmware.api [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195667, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1632.519943] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance ef9f511a-3b2c-4871-b52a-4959e7d3390b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1632.520164] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance 98c33a76-60af-4449-b06b-58c7b0ed7cc9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1632.520415] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=62627) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1632.520532] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=896MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=62627) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1632.534915] env[62627]: DEBUG oslo_vmware.api [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195667, 'name': ReconfigVM_Task, 'duration_secs': 0.294211} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1632.537635] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Reconfigured VM instance instance-0000007c to attach disk [datastore2] 98c33a76-60af-4449-b06b-58c7b0ed7cc9/98c33a76-60af-4449-b06b-58c7b0ed7cc9.vmdk or device None with type streamOptimized {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1632.538519] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2329eb83-f963-45f8-86f0-d6e56ef0053c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.547137] env[62627]: DEBUG oslo_vmware.api [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Waiting for the task: (returnval){ [ 1632.547137] env[62627]: value = "task-2195668" [ 1632.547137] env[62627]: _type = "Task" [ 1632.547137] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1632.557952] env[62627]: DEBUG oslo_vmware.api [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195668, 'name': Rename_Task} progress is 5%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1632.574400] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3737ca49-ee09-4ba0-8d1e-02399b78c5a1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.580885] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af1e67e4-6890-43a3-9551-69f3cbe69fa5 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.610570] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3a57d9f-4f9f-41c2-acda-17a3446fa61c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.617125] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9ccac04-6d4b-4650-8c0b-1b5660136cb1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.630058] env[62627]: DEBUG nova.compute.provider_tree [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Updating inventory in ProviderTree for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1633.057484] env[62627]: DEBUG oslo_vmware.api [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195668, 'name': Rename_Task, 'duration_secs': 0.132196} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1633.057872] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Powering on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1633.057955] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6b79c9e5-a3aa-44b0-ab93-23a4bd6def93 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.064147] env[62627]: DEBUG oslo_vmware.api [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Waiting for the task: (returnval){ [ 1633.064147] env[62627]: value = "task-2195669" [ 1633.064147] env[62627]: _type = "Task" [ 1633.064147] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1633.071164] env[62627]: DEBUG oslo_vmware.api [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195669, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1633.161724] env[62627]: DEBUG nova.scheduler.client.report [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Updated inventory for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with generation 184 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1633.161962] env[62627]: DEBUG nova.compute.provider_tree [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Updating resource provider c17e7f32-68d8-4e2f-8979-15d701de02f7 generation from 184 to 185 during operation: update_inventory {{(pid=62627) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1633.162229] env[62627]: DEBUG nova.compute.provider_tree [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Updating inventory in ProviderTree for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1633.573941] env[62627]: DEBUG oslo_vmware.api [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195669, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1633.667170] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62627) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1633.667445] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.200s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1634.074147] env[62627]: DEBUG oslo_vmware.api [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195669, 'name': PowerOnVM_Task, 'duration_secs': 0.646626} completed successfully. 
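The inventory payload pushed to Placement above determines how much of the provider can be allocated: per resource class, (total - reserved) * allocation_ratio. Applying that arithmetic to the exact figures from the log (the helper is illustrative, not Nova's code):

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    def effective_capacity(inv):
        # Capacity Placement allocates against: (total - reserved) * allocation_ratio.
        return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
                for rc, v in inv.items()}

    print(effective_capacity(inventory))
    # {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}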
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1634.074498] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Powered on the VM {{(pid=62627) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1634.169340] env[62627]: DEBUG nova.compute.manager [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Checking state {{(pid=62627) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1634.170345] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d72fa1b-02b5-484c-be17-1eaf88a8d8ae {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.687330] env[62627]: DEBUG oslo_concurrency.lockutils [None req-c44ff66c-3e41-49f1-b60e-b3e09343644b tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Lock "98c33a76-60af-4449-b06b-58c7b0ed7cc9" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 19.790s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1636.068309] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1654.838095] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e068f2ef-25a2-40c0-be9f-e2ff6063b426 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Acquiring lock "ef9f511a-3b2c-4871-b52a-4959e7d3390b" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1654.838405] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e068f2ef-25a2-40c0-be9f-e2ff6063b426 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Lock "ef9f511a-3b2c-4871-b52a-4959e7d3390b" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1655.341632] env[62627]: DEBUG nova.compute.utils [None req-e068f2ef-25a2-40c0-be9f-e2ff6063b426 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1655.844375] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e068f2ef-25a2-40c0-be9f-e2ff6063b426 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Lock "ef9f511a-3b2c-4871-b52a-4959e7d3390b" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1656.905841] 
env[62627]: DEBUG oslo_concurrency.lockutils [None req-e068f2ef-25a2-40c0-be9f-e2ff6063b426 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Acquiring lock "ef9f511a-3b2c-4871-b52a-4959e7d3390b" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1656.906316] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e068f2ef-25a2-40c0-be9f-e2ff6063b426 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Lock "ef9f511a-3b2c-4871-b52a-4959e7d3390b" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1656.906384] env[62627]: INFO nova.compute.manager [None req-e068f2ef-25a2-40c0-be9f-e2ff6063b426 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Attaching volume 6b986297-b545-4e77-a5d5-04eeeffb5ebf to /dev/sdb [ 1656.936827] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76b5a432-853c-4a7e-960a-00908838029c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.943850] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efc5c245-8b69-427b-ac5a-27666fbe8c5e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.956630] env[62627]: DEBUG nova.virt.block_device [None req-e068f2ef-25a2-40c0-be9f-e2ff6063b426 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Updating existing volume attachment record: 79755429-c236-487b-9a01-c5962e218bc1 {{(pid=62627) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1661.499498] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-e068f2ef-25a2-40c0-be9f-e2ff6063b426 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Volume attach. 
Driver type: vmdk {{(pid=62627) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1661.499738] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-e068f2ef-25a2-40c0-be9f-e2ff6063b426 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447878', 'volume_id': '6b986297-b545-4e77-a5d5-04eeeffb5ebf', 'name': 'volume-6b986297-b545-4e77-a5d5-04eeeffb5ebf', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ef9f511a-3b2c-4871-b52a-4959e7d3390b', 'attached_at': '', 'detached_at': '', 'volume_id': '6b986297-b545-4e77-a5d5-04eeeffb5ebf', 'serial': '6b986297-b545-4e77-a5d5-04eeeffb5ebf'} {{(pid=62627) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1661.500784] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b15ead2-c545-4979-bc2e-1ca0bf9ac395 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.517520] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e51d00f0-437c-40d2-9052-27ea93bfb665 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.541983] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-e068f2ef-25a2-40c0-be9f-e2ff6063b426 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Reconfiguring VM instance instance-0000007d to attach disk [datastore2] volume-6b986297-b545-4e77-a5d5-04eeeffb5ebf/volume-6b986297-b545-4e77-a5d5-04eeeffb5ebf.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1661.542302] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-352bd094-dc58-4e5a-84e5-4d48ec9e6086 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.559332] env[62627]: DEBUG oslo_vmware.api [None req-e068f2ef-25a2-40c0-be9f-e2ff6063b426 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Waiting for the task: (returnval){ [ 1661.559332] env[62627]: value = "task-2195672" [ 1661.559332] env[62627]: _type = "Task" [ 1661.559332] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1661.566626] env[62627]: DEBUG oslo_vmware.api [None req-e068f2ef-25a2-40c0-be9f-e2ff6063b426 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': task-2195672, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1662.069270] env[62627]: DEBUG oslo_vmware.api [None req-e068f2ef-25a2-40c0-be9f-e2ff6063b426 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': task-2195672, 'name': ReconfigVM_Task, 'duration_secs': 0.315865} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1662.069588] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-e068f2ef-25a2-40c0-be9f-e2ff6063b426 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Reconfigured VM instance instance-0000007d to attach disk [datastore2] volume-6b986297-b545-4e77-a5d5-04eeeffb5ebf/volume-6b986297-b545-4e77-a5d5-04eeeffb5ebf.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1662.074204] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2a9d7aba-81b9-47f2-adcd-2c05d901e87d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.088180] env[62627]: DEBUG oslo_vmware.api [None req-e068f2ef-25a2-40c0-be9f-e2ff6063b426 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Waiting for the task: (returnval){ [ 1662.088180] env[62627]: value = "task-2195673" [ 1662.088180] env[62627]: _type = "Task" [ 1662.088180] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1662.095459] env[62627]: DEBUG oslo_vmware.api [None req-e068f2ef-25a2-40c0-be9f-e2ff6063b426 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': task-2195673, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1662.598311] env[62627]: DEBUG oslo_vmware.api [None req-e068f2ef-25a2-40c0-be9f-e2ff6063b426 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': task-2195673, 'name': ReconfigVM_Task, 'duration_secs': 0.136137} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1662.598634] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-e068f2ef-25a2-40c0-be9f-e2ff6063b426 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447878', 'volume_id': '6b986297-b545-4e77-a5d5-04eeeffb5ebf', 'name': 'volume-6b986297-b545-4e77-a5d5-04eeeffb5ebf', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ef9f511a-3b2c-4871-b52a-4959e7d3390b', 'attached_at': '', 'detached_at': '', 'volume_id': '6b986297-b545-4e77-a5d5-04eeeffb5ebf', 'serial': '6b986297-b545-4e77-a5d5-04eeeffb5ebf'} {{(pid=62627) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1663.633983] env[62627]: DEBUG nova.objects.instance [None req-e068f2ef-25a2-40c0-be9f-e2ff6063b426 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Lazy-loading 'flavor' on Instance uuid ef9f511a-3b2c-4871-b52a-4959e7d3390b {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1664.141665] env[62627]: DEBUG oslo_concurrency.lockutils [None req-e068f2ef-25a2-40c0-be9f-e2ff6063b426 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Lock "ef9f511a-3b2c-4871-b52a-4959e7d3390b" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.235s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1664.977273] env[62627]: DEBUG oslo_concurrency.lockutils [None req-214289cc-9fcd-48b5-a7eb-46836c76fae4 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Acquiring lock "ef9f511a-3b2c-4871-b52a-4959e7d3390b" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1664.977559] env[62627]: DEBUG oslo_concurrency.lockutils [None req-214289cc-9fcd-48b5-a7eb-46836c76fae4 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Lock "ef9f511a-3b2c-4871-b52a-4959e7d3390b" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1665.480699] env[62627]: DEBUG nova.compute.utils [None req-214289cc-9fcd-48b5-a7eb-46836c76fae4 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Using /dev/sd instead of None {{(pid=62627) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1665.983705] env[62627]: DEBUG oslo_concurrency.lockutils [None req-214289cc-9fcd-48b5-a7eb-46836c76fae4 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Lock "ef9f511a-3b2c-4871-b52a-4959e7d3390b" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1667.036791] env[62627]: DEBUG oslo_concurrency.lockutils [None req-214289cc-9fcd-48b5-a7eb-46836c76fae4 
tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Acquiring lock "ef9f511a-3b2c-4871-b52a-4959e7d3390b" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1667.037211] env[62627]: DEBUG oslo_concurrency.lockutils [None req-214289cc-9fcd-48b5-a7eb-46836c76fae4 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Lock "ef9f511a-3b2c-4871-b52a-4959e7d3390b" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1667.037322] env[62627]: INFO nova.compute.manager [None req-214289cc-9fcd-48b5-a7eb-46836c76fae4 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Attaching volume 6f6a329b-5ffc-4319-bd8e-d7bb1bb990be to /dev/sdc [ 1667.068122] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75ff5ee8-1b3e-400f-888d-e882d8778540 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.075591] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb8fa255-70cf-4c4a-9357-98fe4479440e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.088711] env[62627]: DEBUG nova.virt.block_device [None req-214289cc-9fcd-48b5-a7eb-46836c76fae4 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Updating existing volume attachment record: 61ad108f-6d56-4b33-8e72-4edc0c0ed664 {{(pid=62627) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1671.631206] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-214289cc-9fcd-48b5-a7eb-46836c76fae4 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Volume attach. 
Driver type: vmdk {{(pid=62627) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1671.631467] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-214289cc-9fcd-48b5-a7eb-46836c76fae4 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447879', 'volume_id': '6f6a329b-5ffc-4319-bd8e-d7bb1bb990be', 'name': 'volume-6f6a329b-5ffc-4319-bd8e-d7bb1bb990be', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ef9f511a-3b2c-4871-b52a-4959e7d3390b', 'attached_at': '', 'detached_at': '', 'volume_id': '6f6a329b-5ffc-4319-bd8e-d7bb1bb990be', 'serial': '6f6a329b-5ffc-4319-bd8e-d7bb1bb990be'} {{(pid=62627) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1671.632328] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5bcaa1f-d093-40eb-9e38-3777910140dc {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.648633] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5d8536c-8181-4635-b248-5662276f1e8d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.674720] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-214289cc-9fcd-48b5-a7eb-46836c76fae4 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Reconfiguring VM instance instance-0000007d to attach disk [datastore2] volume-6f6a329b-5ffc-4319-bd8e-d7bb1bb990be/volume-6f6a329b-5ffc-4319-bd8e-d7bb1bb990be.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1671.674972] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-58267580-5f74-4f62-af6f-55071f075cce {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.692364] env[62627]: DEBUG oslo_vmware.api [None req-214289cc-9fcd-48b5-a7eb-46836c76fae4 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Waiting for the task: (returnval){ [ 1671.692364] env[62627]: value = "task-2195676" [ 1671.692364] env[62627]: _type = "Task" [ 1671.692364] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1671.699793] env[62627]: DEBUG oslo_vmware.api [None req-214289cc-9fcd-48b5-a7eb-46836c76fae4 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': task-2195676, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1671.950653] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f05aea15-6dcd-412f-8cd8-9c65148f6103 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Acquiring lock "98c33a76-60af-4449-b06b-58c7b0ed7cc9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1671.951031] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f05aea15-6dcd-412f-8cd8-9c65148f6103 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Lock "98c33a76-60af-4449-b06b-58c7b0ed7cc9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1671.951314] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f05aea15-6dcd-412f-8cd8-9c65148f6103 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Acquiring lock "98c33a76-60af-4449-b06b-58c7b0ed7cc9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1671.951543] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f05aea15-6dcd-412f-8cd8-9c65148f6103 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Lock "98c33a76-60af-4449-b06b-58c7b0ed7cc9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1671.951751] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f05aea15-6dcd-412f-8cd8-9c65148f6103 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Lock "98c33a76-60af-4449-b06b-58c7b0ed7cc9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1671.954344] env[62627]: INFO nova.compute.manager [None req-f05aea15-6dcd-412f-8cd8-9c65148f6103 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Terminating instance [ 1672.201951] env[62627]: DEBUG oslo_vmware.api [None req-214289cc-9fcd-48b5-a7eb-46836c76fae4 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': task-2195676, 'name': ReconfigVM_Task, 'duration_secs': 0.338294} completed successfully. 
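The acquired/waited/held/released pairs above come from oslo.concurrency's lock utilities serializing per-instance operations (reserve_block_device_name, do_attach_volume, do_terminate_instance) on the instance UUID. A minimal sketch of that usage, assuming only the public lockutils API rather than Nova's own wrappers:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('ef9f511a-3b2c-4871-b52a-4959e7d3390b')
    def do_attach_volume():
        # Runs serialized with any other caller holding the same instance lock,
        # mirroring the "acquired ... waited" / "released ... held" pairs above.
        pass

    # Equivalent context-manager form:
    with lockutils.lock('98c33a76-60af-4449-b06b-58c7b0ed7cc9'):
        pass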
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1672.202168] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-214289cc-9fcd-48b5-a7eb-46836c76fae4 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Reconfigured VM instance instance-0000007d to attach disk [datastore2] volume-6f6a329b-5ffc-4319-bd8e-d7bb1bb990be/volume-6f6a329b-5ffc-4319-bd8e-d7bb1bb990be.vmdk or device None with type thin {{(pid=62627) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1672.206816] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-973b3ab0-e777-4f46-ac22-a181a7488488 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.220981] env[62627]: DEBUG oslo_vmware.api [None req-214289cc-9fcd-48b5-a7eb-46836c76fae4 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Waiting for the task: (returnval){ [ 1672.220981] env[62627]: value = "task-2195677" [ 1672.220981] env[62627]: _type = "Task" [ 1672.220981] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1672.230196] env[62627]: DEBUG oslo_vmware.api [None req-214289cc-9fcd-48b5-a7eb-46836c76fae4 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': task-2195677, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1672.458522] env[62627]: DEBUG nova.compute.manager [None req-f05aea15-6dcd-412f-8cd8-9c65148f6103 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Start destroying the instance on the hypervisor. 
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1672.458720] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-f05aea15-6dcd-412f-8cd8-9c65148f6103 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1672.459621] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-093d00de-9fd3-472e-a9fe-f2957a58ebdf {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.466856] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-f05aea15-6dcd-412f-8cd8-9c65148f6103 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1672.467095] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fb8c5c74-7d30-4058-9e39-25b3759bce4e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.473075] env[62627]: DEBUG oslo_vmware.api [None req-f05aea15-6dcd-412f-8cd8-9c65148f6103 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Waiting for the task: (returnval){ [ 1672.473075] env[62627]: value = "task-2195678" [ 1672.473075] env[62627]: _type = "Task" [ 1672.473075] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1672.480143] env[62627]: DEBUG oslo_vmware.api [None req-f05aea15-6dcd-412f-8cd8-9c65148f6103 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195678, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1672.731242] env[62627]: DEBUG oslo_vmware.api [None req-214289cc-9fcd-48b5-a7eb-46836c76fae4 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': task-2195677, 'name': ReconfigVM_Task, 'duration_secs': 0.126221} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1672.731577] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-214289cc-9fcd-48b5-a7eb-46836c76fae4 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447879', 'volume_id': '6f6a329b-5ffc-4319-bd8e-d7bb1bb990be', 'name': 'volume-6f6a329b-5ffc-4319-bd8e-d7bb1bb990be', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ef9f511a-3b2c-4871-b52a-4959e7d3390b', 'attached_at': '', 'detached_at': '', 'volume_id': '6f6a329b-5ffc-4319-bd8e-d7bb1bb990be', 'serial': '6f6a329b-5ffc-4319-bd8e-d7bb1bb990be'} {{(pid=62627) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1672.983065] env[62627]: DEBUG oslo_vmware.api [None req-f05aea15-6dcd-412f-8cd8-9c65148f6103 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195678, 'name': PowerOffVM_Task, 'duration_secs': 0.1883} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1672.983626] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-f05aea15-6dcd-412f-8cd8-9c65148f6103 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1672.983936] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-f05aea15-6dcd-412f-8cd8-9c65148f6103 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1672.984314] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8d093a6f-afca-4207-bc65-fdcdd3420b11 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.043063] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-f05aea15-6dcd-412f-8cd8-9c65148f6103 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1673.043229] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-f05aea15-6dcd-412f-8cd8-9c65148f6103 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1673.043370] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-f05aea15-6dcd-412f-8cd8-9c65148f6103 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Deleting the datastore file [datastore2] 98c33a76-60af-4449-b06b-58c7b0ed7cc9 {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1673.043637] env[62627]: DEBUG 
oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-35e93a55-69aa-4794-871d-978e9a8d1502 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.049278] env[62627]: DEBUG oslo_vmware.api [None req-f05aea15-6dcd-412f-8cd8-9c65148f6103 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Waiting for the task: (returnval){ [ 1673.049278] env[62627]: value = "task-2195680" [ 1673.049278] env[62627]: _type = "Task" [ 1673.049278] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1673.056535] env[62627]: DEBUG oslo_vmware.api [None req-f05aea15-6dcd-412f-8cd8-9c65148f6103 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195680, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.559181] env[62627]: DEBUG oslo_vmware.api [None req-f05aea15-6dcd-412f-8cd8-9c65148f6103 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Task: {'id': task-2195680, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.129317} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1673.559387] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-f05aea15-6dcd-412f-8cd8-9c65148f6103 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1673.559640] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-f05aea15-6dcd-412f-8cd8-9c65148f6103 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1673.559859] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-f05aea15-6dcd-412f-8cd8-9c65148f6103 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1673.560083] env[62627]: INFO nova.compute.manager [None req-f05aea15-6dcd-412f-8cd8-9c65148f6103 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1673.560400] env[62627]: DEBUG oslo.service.loopingcall [None req-f05aea15-6dcd-412f-8cd8-9c65148f6103 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1673.560645] env[62627]: DEBUG nova.compute.manager [-] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1673.560767] env[62627]: DEBUG nova.network.neutron [-] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1673.767403] env[62627]: DEBUG nova.objects.instance [None req-214289cc-9fcd-48b5-a7eb-46836c76fae4 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Lazy-loading 'flavor' on Instance uuid ef9f511a-3b2c-4871-b52a-4959e7d3390b {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1674.132920] env[62627]: DEBUG nova.compute.manager [req-0d4d5f20-73b6-45de-9e2a-426deadc73f8 req-752a0660-ed26-4293-826a-2da19abeb5f6 service nova] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Received event network-vif-deleted-374cdd5a-3b25-49a6-a1a0-45d9d5a331c1 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1674.133139] env[62627]: INFO nova.compute.manager [req-0d4d5f20-73b6-45de-9e2a-426deadc73f8 req-752a0660-ed26-4293-826a-2da19abeb5f6 service nova] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Neutron deleted interface 374cdd5a-3b25-49a6-a1a0-45d9d5a331c1; detaching it from the instance and deleting it from the info cache [ 1674.133454] env[62627]: DEBUG nova.network.neutron [req-0d4d5f20-73b6-45de-9e2a-426deadc73f8 req-752a0660-ed26-4293-826a-2da19abeb5f6 service nova] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1674.272181] env[62627]: DEBUG oslo_concurrency.lockutils [None req-214289cc-9fcd-48b5-a7eb-46836c76fae4 tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Lock "ef9f511a-3b2c-4871-b52a-4959e7d3390b" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.234s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1674.544598] env[62627]: DEBUG nova.network.neutron [-] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1674.603707] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cb4e26be-6884-43cd-a41b-0024856586ee tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Acquiring lock "ef9f511a-3b2c-4871-b52a-4959e7d3390b" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1674.603977] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cb4e26be-6884-43cd-a41b-0024856586ee tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Lock "ef9f511a-3b2c-4871-b52a-4959e7d3390b" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1674.635652] env[62627]: DEBUG 
oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a379ce8f-b467-4a0e-bdeb-c588b8cff1c1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.645628] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f768da6-1a0f-412a-b69a-fe9e9d8650f4 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.670442] env[62627]: DEBUG nova.compute.manager [req-0d4d5f20-73b6-45de-9e2a-426deadc73f8 req-752a0660-ed26-4293-826a-2da19abeb5f6 service nova] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Detach interface failed, port_id=374cdd5a-3b25-49a6-a1a0-45d9d5a331c1, reason: Instance 98c33a76-60af-4449-b06b-58c7b0ed7cc9 could not be found. {{(pid=62627) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1675.047907] env[62627]: INFO nova.compute.manager [-] [instance: 98c33a76-60af-4449-b06b-58c7b0ed7cc9] Took 1.49 seconds to deallocate network for instance. [ 1675.107000] env[62627]: INFO nova.compute.manager [None req-cb4e26be-6884-43cd-a41b-0024856586ee tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Detaching volume 6b986297-b545-4e77-a5d5-04eeeffb5ebf [ 1675.142697] env[62627]: INFO nova.virt.block_device [None req-cb4e26be-6884-43cd-a41b-0024856586ee tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Attempting to driver detach volume 6b986297-b545-4e77-a5d5-04eeeffb5ebf from mountpoint /dev/sdb [ 1675.142930] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb4e26be-6884-43cd-a41b-0024856586ee tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Volume detach. 
Driver type: vmdk {{(pid=62627) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1675.143184] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb4e26be-6884-43cd-a41b-0024856586ee tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447878', 'volume_id': '6b986297-b545-4e77-a5d5-04eeeffb5ebf', 'name': 'volume-6b986297-b545-4e77-a5d5-04eeeffb5ebf', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ef9f511a-3b2c-4871-b52a-4959e7d3390b', 'attached_at': '', 'detached_at': '', 'volume_id': '6b986297-b545-4e77-a5d5-04eeeffb5ebf', 'serial': '6b986297-b545-4e77-a5d5-04eeeffb5ebf'} {{(pid=62627) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1675.144034] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-924ee421-695c-4323-b983-47f0bfb28a7d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.168259] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0c4d325-716a-453b-8918-ebf3d807395d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.175643] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c82b67de-888b-4b94-8d50-015ab5275ecd {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.208435] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad185780-dc3a-45b0-908b-217f39215033 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.232694] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb4e26be-6884-43cd-a41b-0024856586ee tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] The volume has not been displaced from its original location: [datastore2] volume-6b986297-b545-4e77-a5d5-04eeeffb5ebf/volume-6b986297-b545-4e77-a5d5-04eeeffb5ebf.vmdk. No consolidation needed. 
{{(pid=62627) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1675.240314] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb4e26be-6884-43cd-a41b-0024856586ee tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Reconfiguring VM instance instance-0000007d to detach disk 2001 {{(pid=62627) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1675.240570] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7672b8d8-7f59-4a1f-bd42-522a98495327 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.258544] env[62627]: DEBUG oslo_vmware.api [None req-cb4e26be-6884-43cd-a41b-0024856586ee tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Waiting for the task: (returnval){ [ 1675.258544] env[62627]: value = "task-2195681" [ 1675.258544] env[62627]: _type = "Task" [ 1675.258544] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1675.265917] env[62627]: DEBUG oslo_vmware.api [None req-cb4e26be-6884-43cd-a41b-0024856586ee tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': task-2195681, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1675.554269] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f05aea15-6dcd-412f-8cd8-9c65148f6103 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1675.554534] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f05aea15-6dcd-412f-8cd8-9c65148f6103 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1675.554766] env[62627]: DEBUG nova.objects.instance [None req-f05aea15-6dcd-412f-8cd8-9c65148f6103 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Lazy-loading 'resources' on Instance uuid 98c33a76-60af-4449-b06b-58c7b0ed7cc9 {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1675.768098] env[62627]: DEBUG oslo_vmware.api [None req-cb4e26be-6884-43cd-a41b-0024856586ee tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': task-2195681, 'name': ReconfigVM_Task, 'duration_secs': 0.216115} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1675.768373] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb4e26be-6884-43cd-a41b-0024856586ee tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Reconfigured VM instance instance-0000007d to detach disk 2001 {{(pid=62627) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1675.772984] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-db22b938-a6bb-4bec-ba91-c4ac9e0129eb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.787572] env[62627]: DEBUG oslo_vmware.api [None req-cb4e26be-6884-43cd-a41b-0024856586ee tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Waiting for the task: (returnval){ [ 1675.787572] env[62627]: value = "task-2195682" [ 1675.787572] env[62627]: _type = "Task" [ 1675.787572] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1675.795382] env[62627]: DEBUG oslo_vmware.api [None req-cb4e26be-6884-43cd-a41b-0024856586ee tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': task-2195682, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1676.097526] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67b73af4-6647-473e-aac9-56c79378ec1c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.105126] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39e2ace9-89bc-4e7f-a86b-710147c5c264 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.134165] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ced1805-3e67-41f0-b150-07c02f15e6ae {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.143176] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d90a0ee8-60d4-49e7-90a0-b3bb2944bcc0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.156088] env[62627]: DEBUG nova.compute.provider_tree [None req-f05aea15-6dcd-412f-8cd8-9c65148f6103 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1676.298354] env[62627]: DEBUG oslo_vmware.api [None req-cb4e26be-6884-43cd-a41b-0024856586ee tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': task-2195682, 'name': ReconfigVM_Task, 'duration_secs': 0.127382} completed successfully. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1676.298626] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb4e26be-6884-43cd-a41b-0024856586ee tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447878', 'volume_id': '6b986297-b545-4e77-a5d5-04eeeffb5ebf', 'name': 'volume-6b986297-b545-4e77-a5d5-04eeeffb5ebf', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ef9f511a-3b2c-4871-b52a-4959e7d3390b', 'attached_at': '', 'detached_at': '', 'volume_id': '6b986297-b545-4e77-a5d5-04eeeffb5ebf', 'serial': '6b986297-b545-4e77-a5d5-04eeeffb5ebf'} {{(pid=62627) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1676.659846] env[62627]: DEBUG nova.scheduler.client.report [None req-f05aea15-6dcd-412f-8cd8-9c65148f6103 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1676.836949] env[62627]: DEBUG nova.objects.instance [None req-cb4e26be-6884-43cd-a41b-0024856586ee tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Lazy-loading 'flavor' on Instance uuid ef9f511a-3b2c-4871-b52a-4959e7d3390b {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1677.164594] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f05aea15-6dcd-412f-8cd8-9c65148f6103 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.610s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1677.185942] env[62627]: INFO nova.scheduler.client.report [None req-f05aea15-6dcd-412f-8cd8-9c65148f6103 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Deleted allocations for instance 98c33a76-60af-4449-b06b-58c7b0ed7cc9 [ 1677.694908] env[62627]: DEBUG oslo_concurrency.lockutils [None req-f05aea15-6dcd-412f-8cd8-9c65148f6103 tempest-AttachVolumeShelveTestJSON-1329357808 tempest-AttachVolumeShelveTestJSON-1329357808-project-member] Lock "98c33a76-60af-4449-b06b-58c7b0ed7cc9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.744s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1677.843657] env[62627]: DEBUG oslo_concurrency.lockutils [None req-cb4e26be-6884-43cd-a41b-0024856586ee tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Lock "ef9f511a-3b2c-4871-b52a-4959e7d3390b" "released" by 
"nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.239s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1677.849595] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1551b318-91cb-436b-83b1-f2de2579759c tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Acquiring lock "ef9f511a-3b2c-4871-b52a-4959e7d3390b" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1677.849820] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1551b318-91cb-436b-83b1-f2de2579759c tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Lock "ef9f511a-3b2c-4871-b52a-4959e7d3390b" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1678.352234] env[62627]: INFO nova.compute.manager [None req-1551b318-91cb-436b-83b1-f2de2579759c tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Detaching volume 6f6a329b-5ffc-4319-bd8e-d7bb1bb990be [ 1678.389065] env[62627]: INFO nova.virt.block_device [None req-1551b318-91cb-436b-83b1-f2de2579759c tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Attempting to driver detach volume 6f6a329b-5ffc-4319-bd8e-d7bb1bb990be from mountpoint /dev/sdc [ 1678.389325] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-1551b318-91cb-436b-83b1-f2de2579759c tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Volume detach. 
Driver type: vmdk {{(pid=62627) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1678.389524] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-1551b318-91cb-436b-83b1-f2de2579759c tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447879', 'volume_id': '6f6a329b-5ffc-4319-bd8e-d7bb1bb990be', 'name': 'volume-6f6a329b-5ffc-4319-bd8e-d7bb1bb990be', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ef9f511a-3b2c-4871-b52a-4959e7d3390b', 'attached_at': '', 'detached_at': '', 'volume_id': '6f6a329b-5ffc-4319-bd8e-d7bb1bb990be', 'serial': '6f6a329b-5ffc-4319-bd8e-d7bb1bb990be'} {{(pid=62627) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1678.391029] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d88e64e-8cb6-4d16-a357-913811a71ef1 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.414319] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb513ba9-6af0-453b-8e22-a1b6ac3901c0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.422256] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf0c34c0-f8b5-4b83-8b95-9bf3cb85cb2c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.443298] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fcdf2f6-802f-4f42-aa7f-3e303a48426f {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.460010] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-1551b318-91cb-436b-83b1-f2de2579759c tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] The volume has not been displaced from its original location: [datastore2] volume-6f6a329b-5ffc-4319-bd8e-d7bb1bb990be/volume-6f6a329b-5ffc-4319-bd8e-d7bb1bb990be.vmdk. No consolidation needed. 
{{(pid=62627) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1678.465526] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-1551b318-91cb-436b-83b1-f2de2579759c tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Reconfiguring VM instance instance-0000007d to detach disk 2002 {{(pid=62627) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1678.465878] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bc8ca3bc-40fe-47b8-b724-905cd7f0933c {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.484622] env[62627]: DEBUG oslo_vmware.api [None req-1551b318-91cb-436b-83b1-f2de2579759c tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Waiting for the task: (returnval){ [ 1678.484622] env[62627]: value = "task-2195683" [ 1678.484622] env[62627]: _type = "Task" [ 1678.484622] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1678.492626] env[62627]: DEBUG oslo_vmware.api [None req-1551b318-91cb-436b-83b1-f2de2579759c tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': task-2195683, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1678.796726] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1678.994675] env[62627]: DEBUG oslo_vmware.api [None req-1551b318-91cb-436b-83b1-f2de2579759c tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': task-2195683, 'name': ReconfigVM_Task, 'duration_secs': 0.214169} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1678.994956] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-1551b318-91cb-436b-83b1-f2de2579759c tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Reconfigured VM instance instance-0000007d to detach disk 2002 {{(pid=62627) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1678.999432] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3f7e7142-bbc9-468c-8240-ae1e1d6bba6b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.013813] env[62627]: DEBUG oslo_vmware.api [None req-1551b318-91cb-436b-83b1-f2de2579759c tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Waiting for the task: (returnval){ [ 1679.013813] env[62627]: value = "task-2195684" [ 1679.013813] env[62627]: _type = "Task" [ 1679.013813] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1679.021046] env[62627]: DEBUG oslo_vmware.api [None req-1551b318-91cb-436b-83b1-f2de2579759c tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': task-2195684, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1679.296667] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1679.524450] env[62627]: DEBUG oslo_vmware.api [None req-1551b318-91cb-436b-83b1-f2de2579759c tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': task-2195684, 'name': ReconfigVM_Task, 'duration_secs': 0.129264} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1679.524795] env[62627]: DEBUG nova.virt.vmwareapi.volumeops [None req-1551b318-91cb-436b-83b1-f2de2579759c tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-447879', 'volume_id': '6f6a329b-5ffc-4319-bd8e-d7bb1bb990be', 'name': 'volume-6f6a329b-5ffc-4319-bd8e-d7bb1bb990be', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ef9f511a-3b2c-4871-b52a-4959e7d3390b', 'attached_at': '', 'detached_at': '', 'volume_id': '6f6a329b-5ffc-4319-bd8e-d7bb1bb990be', 'serial': '6f6a329b-5ffc-4319-bd8e-d7bb1bb990be'} {{(pid=62627) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1680.064345] env[62627]: DEBUG nova.objects.instance [None req-1551b318-91cb-436b-83b1-f2de2579759c tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Lazy-loading 'flavor' on Instance uuid ef9f511a-3b2c-4871-b52a-4959e7d3390b {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1681.070879] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1551b318-91cb-436b-83b1-f2de2579759c tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Lock "ef9f511a-3b2c-4871-b52a-4959e7d3390b" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.221s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1682.230379] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2c48bb38-d9f6-4e55-bf7e-d444200b064a tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Acquiring lock "ef9f511a-3b2c-4871-b52a-4959e7d3390b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1682.230775] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2c48bb38-d9f6-4e55-bf7e-d444200b064a tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Lock "ef9f511a-3b2c-4871-b52a-4959e7d3390b" acquired by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1682.230833] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2c48bb38-d9f6-4e55-bf7e-d444200b064a tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Acquiring lock "ef9f511a-3b2c-4871-b52a-4959e7d3390b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1682.231027] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2c48bb38-d9f6-4e55-bf7e-d444200b064a tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Lock "ef9f511a-3b2c-4871-b52a-4959e7d3390b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1682.231204] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2c48bb38-d9f6-4e55-bf7e-d444200b064a tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Lock "ef9f511a-3b2c-4871-b52a-4959e7d3390b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1682.233369] env[62627]: INFO nova.compute.manager [None req-2c48bb38-d9f6-4e55-bf7e-d444200b064a tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Terminating instance [ 1682.296500] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1682.737103] env[62627]: DEBUG nova.compute.manager [None req-2c48bb38-d9f6-4e55-bf7e-d444200b064a tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Start destroying the instance on the hypervisor. 
{{(pid=62627) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1682.737342] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-2c48bb38-d9f6-4e55-bf7e-d444200b064a tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Destroying instance {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1682.738241] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e04c0bf7-7f88-46be-ab6f-7f418c76629e {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.746367] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c48bb38-d9f6-4e55-bf7e-d444200b064a tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Powering off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1682.746589] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0c72cc74-40cc-4c87-9300-b11c8985c699 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.753131] env[62627]: DEBUG oslo_vmware.api [None req-2c48bb38-d9f6-4e55-bf7e-d444200b064a tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Waiting for the task: (returnval){ [ 1682.753131] env[62627]: value = "task-2195686" [ 1682.753131] env[62627]: _type = "Task" [ 1682.753131] env[62627]: } to complete. {{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1682.762336] env[62627]: DEBUG oslo_vmware.api [None req-2c48bb38-d9f6-4e55-bf7e-d444200b064a tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': task-2195686, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.799570] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1682.799797] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1682.799964] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1682.800141] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62627) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1682.801116] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-845c8744-50a4-4c44-8576-06f51b67b185 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.809022] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-533f1385-7c4e-4ef4-b0c8-58abcc788252 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.823781] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09095fdf-1301-4624-9045-d810134e07bb {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.830120] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f00c385-a4c7-48c8-bbec-421b8ac306ba {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.859162] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180998MB free_disk=177GB free_vcpus=48 pci_devices=None {{(pid=62627) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1682.859371] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1682.859700] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62627) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1683.263415] env[62627]: DEBUG oslo_vmware.api [None req-2c48bb38-d9f6-4e55-bf7e-d444200b064a tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': task-2195686, 'name': PowerOffVM_Task, 'duration_secs': 0.210446} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1683.263856] env[62627]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c48bb38-d9f6-4e55-bf7e-d444200b064a tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Powered off the VM {{(pid=62627) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1683.263856] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-2c48bb38-d9f6-4e55-bf7e-d444200b064a tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Unregistering the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1683.264124] env[62627]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6feaf4ec-ed23-41d6-9eb1-bba46a5e991d {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.333934] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-2c48bb38-d9f6-4e55-bf7e-d444200b064a tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Unregistered the VM {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1683.334172] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-2c48bb38-d9f6-4e55-bf7e-d444200b064a tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Deleting contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1683.334351] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c48bb38-d9f6-4e55-bf7e-d444200b064a tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Deleting the datastore file [datastore2] ef9f511a-3b2c-4871-b52a-4959e7d3390b {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1683.334603] env[62627]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3b0e095e-0cb9-499d-afc9-063efe9d7509 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.340255] env[62627]: DEBUG oslo_vmware.api [None req-2c48bb38-d9f6-4e55-bf7e-d444200b064a tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Waiting for the task: (returnval){ [ 1683.340255] env[62627]: value = "task-2195688" [ 1683.340255] env[62627]: _type = "Task" [ 1683.340255] env[62627]: } to complete. 
{{(pid=62627) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1683.347779] env[62627]: DEBUG oslo_vmware.api [None req-2c48bb38-d9f6-4e55-bf7e-d444200b064a tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': task-2195688, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1683.850879] env[62627]: DEBUG oslo_vmware.api [None req-2c48bb38-d9f6-4e55-bf7e-d444200b064a tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Task: {'id': task-2195688, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.138907} completed successfully. {{(pid=62627) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1683.851911] env[62627]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c48bb38-d9f6-4e55-bf7e-d444200b064a tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Deleted the datastore file {{(pid=62627) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1683.852146] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-2c48bb38-d9f6-4e55-bf7e-d444200b064a tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Deleted contents of the VM from datastore datastore2 {{(pid=62627) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1683.852338] env[62627]: DEBUG nova.virt.vmwareapi.vmops [None req-2c48bb38-d9f6-4e55-bf7e-d444200b064a tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Instance destroyed {{(pid=62627) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1683.852548] env[62627]: INFO nova.compute.manager [None req-2c48bb38-d9f6-4e55-bf7e-d444200b064a tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1683.852811] env[62627]: DEBUG oslo.service.loopingcall [None req-2c48bb38-d9f6-4e55-bf7e-d444200b064a tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62627) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1683.853009] env[62627]: DEBUG nova.compute.manager [-] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Deallocating network for instance {{(pid=62627) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1683.853111] env[62627]: DEBUG nova.network.neutron [-] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] deallocate_for_instance() {{(pid=62627) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1683.889681] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Instance ef9f511a-3b2c-4871-b52a-4959e7d3390b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62627) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1683.890683] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=62627) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1683.890683] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=62627) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1683.921133] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0e344fc-fb91-4c99-8758-02525b506d19 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.929598] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d0fbad6-191c-4bf3-b3b1-5caa3f054528 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.962526] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba1e791c-c195-49f0-bcf5-c3b6f64ac400 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.969878] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be1db54f-c6f8-42e3-9c28-5e0a78a6b111 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.983792] env[62627]: DEBUG nova.compute.provider_tree [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Updating inventory in ProviderTree for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1684.439539] env[62627]: DEBUG nova.compute.manager [req-174c3f6e-5c23-4d59-b6cc-b1e99fea1535 req-8231f5fc-fb92-44e5-81e2-a2d1c090c4de service nova] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Received event network-vif-deleted-82de6dd8-1d57-45fc-9263-40c89eeacaa3 {{(pid=62627) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1684.439827] env[62627]: INFO nova.compute.manager [req-174c3f6e-5c23-4d59-b6cc-b1e99fea1535 req-8231f5fc-fb92-44e5-81e2-a2d1c090c4de service nova] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Neutron deleted interface 82de6dd8-1d57-45fc-9263-40c89eeacaa3; detaching it from the instance and deleting it from the info cache [ 1684.439901] env[62627]: DEBUG nova.network.neutron [req-174c3f6e-5c23-4d59-b6cc-b1e99fea1535 req-8231f5fc-fb92-44e5-81e2-a2d1c090c4de service nova] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 
[ 1684.520215] env[62627]: DEBUG nova.scheduler.client.report [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Updated inventory for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with generation 185 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} 
[ 1684.520403] env[62627]: DEBUG nova.compute.provider_tree [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Updating resource provider c17e7f32-68d8-4e2f-8979-15d701de02f7 generation from 185 to 186 during operation: update_inventory {{(pid=62627) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} 
[ 1684.520560] env[62627]: DEBUG nova.compute.provider_tree [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Updating inventory in ProviderTree for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} 
[ 1684.916261] env[62627]: DEBUG nova.network.neutron [-] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} 
[ 1684.942326] env[62627]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2ea9a08d-6cb5-48c7-801f-3166ac02ee03 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
[ 1684.952982] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b57d2797-fbe9-4d01-b166-70e3fcb453ab {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
[ 1684.978705] env[62627]: DEBUG nova.compute.manager [req-174c3f6e-5c23-4d59-b6cc-b1e99fea1535 req-8231f5fc-fb92-44e5-81e2-a2d1c090c4de service nova] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Detach interface failed, port_id=82de6dd8-1d57-45fc-9263-40c89eeacaa3, reason: Instance ef9f511a-3b2c-4871-b52a-4959e7d3390b could not be found. {{(pid=62627) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} 
[ 1685.025049] env[62627]: DEBUG nova.compute.resource_tracker [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62627) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} 
[ 1685.025234] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.166s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} 
[ 1685.419410] env[62627]: INFO nova.compute.manager [-] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Took 1.57 seconds to deallocate network for instance. 
[ 1685.926452] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2c48bb38-d9f6-4e55-bf7e-d444200b064a tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} 
[ 1685.926880] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2c48bb38-d9f6-4e55-bf7e-d444200b064a tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} 
[ 1685.927025] env[62627]: DEBUG nova.objects.instance [None req-2c48bb38-d9f6-4e55-bf7e-d444200b064a tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Lazy-loading 'resources' on Instance uuid ef9f511a-3b2c-4871-b52a-4959e7d3390b {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} 
[ 1686.460400] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57bc6853-3298-4ff9-a534-7aed106bcc0b {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
[ 1686.468943] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b719031e-ff96-44c9-9ec7-89e9b20cefb0 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
[ 1686.498991] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc829552-6e30-4ea4-9b87-ae70f58fde50 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
[ 1686.506118] env[62627]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a0a8f6f-f53a-4fab-9545-472d0f969860 {{(pid=62627) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
[ 1686.519034] env[62627]: DEBUG nova.compute.provider_tree [None req-2c48bb38-d9f6-4e55-bf7e-d444200b064a tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Inventory has not changed in ProviderTree for provider: c17e7f32-68d8-4e2f-8979-15d701de02f7 {{(pid=62627) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} 
[ 1687.022120] env[62627]: DEBUG nova.scheduler.client.report [None req-2c48bb38-d9f6-4e55-bf7e-d444200b064a tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Inventory has not changed for provider c17e7f32-68d8-4e2f-8979-15d701de02f7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62627) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} 
[ 1687.026041] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} 
[ 1687.026165] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} 
[ 1687.026267] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} 
[ 1687.026391] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62627) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} 
[ 1687.296872] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} 
[ 1687.297106] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Starting heal instance info cache {{(pid=62627) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} 
[ 1687.297158] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Rebuilding the list of instances to heal {{(pid=62627) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10449}} 
[ 1687.527522] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2c48bb38-d9f6-4e55-bf7e-d444200b064a tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.601s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} 
[ 1687.544448] env[62627]: INFO nova.scheduler.client.report [None req-2c48bb38-d9f6-4e55-bf7e-d444200b064a tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Deleted allocations for instance ef9f511a-3b2c-4871-b52a-4959e7d3390b 
[ 1687.817154] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Acquiring lock "refresh_cache-ef9f511a-3b2c-4871-b52a-4959e7d3390b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} 
[ 1687.817349] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Acquired lock "refresh_cache-ef9f511a-3b2c-4871-b52a-4959e7d3390b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} 
[ 1687.817466] env[62627]: DEBUG nova.network.neutron [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Forcefully refreshing network info cache for instance {{(pid=62627) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} 
[ 1687.817620] env[62627]: DEBUG nova.objects.instance [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Lazy-loading 'info_cache' on Instance uuid ef9f511a-3b2c-4871-b52a-4959e7d3390b {{(pid=62627) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} 
[ 1688.051965] env[62627]: DEBUG oslo_concurrency.lockutils [None req-2c48bb38-d9f6-4e55-bf7e-d444200b064a tempest-AttachVolumeTestJSON-1790956899 tempest-AttachVolumeTestJSON-1790956899-project-member] Lock "ef9f511a-3b2c-4871-b52a-4959e7d3390b" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 5.821s {{(pid=62627) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} 
[ 1688.824065] env[62627]: DEBUG nova.compute.utils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Can not refresh info_cache because instance was not found {{(pid=62627) refresh_info_cache_for_instance /opt/stack/nova/nova/compute/utils.py:1055}} 
[ 1688.840141] env[62627]: DEBUG nova.network.neutron [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Instance cache missing network info. {{(pid=62627) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} 
[ 1689.414491] env[62627]: DEBUG nova.network.neutron [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Updating instance_info_cache with network_info: [] {{(pid=62627) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} 
[ 1689.916705] env[62627]: DEBUG oslo_concurrency.lockutils [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Releasing lock "refresh_cache-ef9f511a-3b2c-4871-b52a-4959e7d3390b" {{(pid=62627) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} 
[ 1689.916929] env[62627]: DEBUG nova.compute.manager [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] [instance: ef9f511a-3b2c-4871-b52a-4959e7d3390b] Updated the network info_cache for instance {{(pid=62627) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10516}} 
[ 1689.917165] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} 
[ 1690.296758] env[62627]: DEBUG oslo_service.periodic_task [None req-1fc49b59-e3e3-4037-8e69-b44f0a403044 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62627) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}